Example #1
/* Called after videoaggregator fixates our caps */
static gboolean
_negotiated_caps (GstAggregator * agg, GstCaps * caps)
{
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GstCaps *in_caps;

  GST_LOG_OBJECT (mix, "Configured output caps %" GST_PTR_FORMAT, caps);

  if (GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps)
    if (!GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps))
      return FALSE;

  /* Update the glview_convert output */

  /* We can configure the view_converter now */
  gst_gl_view_convert_set_context (mix->viewconvert,
      GST_GL_BASE_MIXER (mix)->context);

  in_caps = gst_video_info_to_caps (&mix->mix_info);
  gst_caps_set_features (in_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
  gst_caps_set_simple (in_caps, "texture-target", G_TYPE_STRING,
      GST_GL_TEXTURE_TARGET_2D_STR, NULL);

  gst_gl_view_convert_set_caps (mix->viewconvert, in_caps, caps);

  return TRUE;
}
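
A note on the pattern: Example #1 converts a GstVideoInfo into caps with gst_video_info_to_caps() and then tags the first structure with a caps feature. The same core sequence, stripped of the element context, looks like the following minimal sketch (the I420/640x480/30fps values are illustrative, not taken from any example here):

#include <gst/gst.h>
#include <gst/video/video.h>

int
main (int argc, char **argv)
{
  GstVideoInfo info;
  GstCaps *caps;
  gchar *str;

  gst_init (&argc, &argv);

  /* Describe a raw video stream: format, resolution, framerate */
  gst_video_info_init (&info);
  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 640, 480);
  info.fps_n = 30;
  info.fps_d = 1;

  /* The caller owns the returned caps and must unref them */
  caps = gst_video_info_to_caps (&info);

  str = gst_caps_to_string (caps);
  g_print ("caps: %s\n", str);

  g_free (str);
  gst_caps_unref (caps);
  return 0;
}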
Example #2
GstCaps *
gst_edt_pdv_sink_get_caps (GstBaseSink * basesink, GstCaps * filter_caps)
{
  GstEdtPdvSink *pdvsink = GST_EDT_PDV_SINK (basesink);
  gint width, height, depth;
  GstVideoFormat format;
  GstVideoInfo vinfo;

  if (!pdvsink->dev) {
    return gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SINK_PAD
            (pdvsink)));
  }

  gst_video_info_init (&vinfo);
  width = pdv_get_width (pdvsink->dev);
  height = pdv_get_height (pdvsink->dev);
  depth = pdv_get_depth (pdvsink->dev);

  switch (depth) {
    case 8:
      format = GST_VIDEO_FORMAT_GRAY8;
      break;
    case 16:
      /* TODO: will this be host order or always one of BE/LE? */
      format = GST_VIDEO_FORMAT_GRAY16_BE;
      break;
    default:
      /* gst_video_info_set_format() rejects GST_VIDEO_FORMAT_UNKNOWN,
       * so fall back to the template caps for unsupported depths */
      GST_WARNING_OBJECT (pdvsink, "Unsupported depth %d", depth);
      return gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SINK_PAD
              (pdvsink)));
  }
  gst_video_info_set_format (&vinfo, format, width, height);

  /* TODO: handle filter_caps */
  return gst_video_info_to_caps (&vinfo);
}
Example #3
static GstCaps *
gst_dshowvideosrc_getcaps_from_enum_mediatypes (GstDshowVideoSrc * src, IPin * pin)
{
    GstCaps *caps = NULL;
    IEnumMediaTypes *enum_mediatypes = NULL;
    HRESULT hres = S_OK;
    GstCapturePinMediaType *pin_mediatype = NULL;

    hres = pin->EnumMediaTypes (&enum_mediatypes);
    if (FAILED (hres)) {
        GST_ERROR ("Failed to retrieve IEnumMediaTypes (error=0x%x)", hres);
        return NULL;
    }

    caps = gst_caps_new_empty ();

    while ((pin_mediatype = gst_dshow_new_pin_mediatype_from_enum_mediatypes (pin, enum_mediatypes)) != NULL) {

        GstCaps *mediacaps = NULL;
        GstVideoFormat video_format = gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);
        GstVideoInfo info;

        gst_video_info_init(&info);
        gst_video_info_set_format(&info, video_format, pin_mediatype->defaultWidth, pin_mediatype->defaultHeight);
        info.fps_n = pin_mediatype->defaultFPS;
        info.fps_d = 1;
        info.par_n = 1;
        info.par_d = 1;

        if(video_format == GST_VIDEO_FORMAT_ENCODED) {
            if(gst_dshow_check_mediatype(pin_mediatype->mediatype, MEDIASUBTYPE_MJPG, FORMAT_VideoInfo)) {
                mediacaps = gst_caps_new_simple("image/jpeg", "width", G_TYPE_INT, info.width,
                                                "height", G_TYPE_INT, info.height,
                                                "framerate", GST_TYPE_FRACTION, info.fps_n, info.fps_d,
                                                NULL); /* NULL-terminate the varargs list */
            }
        }
        else if (video_format != GST_VIDEO_FORMAT_UNKNOWN)
            mediacaps = gst_video_info_to_caps (&info);

        if (mediacaps) {
            src->pins_mediatypes =
                g_list_append (src->pins_mediatypes, pin_mediatype);
            gst_caps_append (caps, mediacaps);
        } else {
            /* failed to convert dshow caps */
            gst_dshow_free_pin_mediatype (pin_mediatype);
        }
    }

    enum_mediatypes->Release ();

    if (caps && gst_caps_is_empty (caps)) {
        gst_caps_unref (caps);
        caps = NULL;
    }

    return caps;
}
Example #4
static gboolean
gst_dvdec_src_negotiate (GstDVDec * dvdec)
{
  GstCaps *othercaps;
  gboolean ret;

  /* no PAR was specified in input, derive from encoded data */
  if (dvdec->need_par) {
    if (dvdec->PAL) {
      if (dvdec->wide) {
        dvdec->par_x = PAL_WIDE_PAR_X;
        dvdec->par_y = PAL_WIDE_PAR_Y;
      } else {
        dvdec->par_x = PAL_NORMAL_PAR_X;
        dvdec->par_y = PAL_NORMAL_PAR_Y;
      }
    } else {
      if (dvdec->wide) {
        dvdec->par_x = NTSC_WIDE_PAR_X;
        dvdec->par_y = NTSC_WIDE_PAR_Y;
      } else {
        dvdec->par_x = NTSC_NORMAL_PAR_X;
        dvdec->par_y = NTSC_NORMAL_PAR_Y;
      }
    }
    GST_DEBUG_OBJECT (dvdec, "Inferred PAR %d/%d from video format",
        dvdec->par_x, dvdec->par_y);
  }

  /* ignoring rgb, bgr0 for now */
  dvdec->bpp = 2;

  gst_video_info_set_format (&dvdec->vinfo, GST_VIDEO_FORMAT_YUY2,
      720, dvdec->height);
  dvdec->vinfo.fps_n = dvdec->framerate_numerator;
  dvdec->vinfo.fps_d = dvdec->framerate_denominator;
  dvdec->vinfo.par_n = dvdec->par_x;
  dvdec->vinfo.par_d = dvdec->par_y;
  if (dvdec->interlaced) {
    dvdec->vinfo.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
  } else {
    dvdec->vinfo.interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
  }

  othercaps = gst_video_info_to_caps (&dvdec->vinfo);
  ret = gst_pad_set_caps (dvdec->srcpad, othercaps);

  gst_dvdec_negotiate_pool (dvdec, othercaps, &dvdec->vinfo);
  gst_caps_unref (othercaps);

  dvdec->src_negotiated = TRUE;

  return ret;
}
Example #5
GstCaps *
gst_dshowvideosrc_getcaps_from_enum_mediatypes (IPin * pin, GList ** pins_mediatypes)
{
  GstCaps *caps = NULL;
  IEnumMediaTypes *enum_mediatypes = NULL;
  HRESULT hres = S_OK;
  GstCapturePinMediaType *pin_mediatype = NULL;

  hres = pin->EnumMediaTypes (&enum_mediatypes);
  if (FAILED (hres)) {
    GST_ERROR ("Failed to retrieve IEnumMediaTypes (error=0x%x)", hres);
    return NULL;
  }

  caps = gst_caps_new_empty ();

  while ((pin_mediatype = gst_dshow_new_pin_mediatype_from_enum_mediatypes (pin, enum_mediatypes)) != NULL) {

    GstCaps *mediacaps = NULL;
    GstVideoFormat video_format = gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);

    if (video_format != GST_VIDEO_FORMAT_UNKNOWN) {
      GstVideoInfo info;

      gst_video_info_init (&info);
      gst_video_info_set_format (&info, video_format,
          pin_mediatype->defaultWidth, pin_mediatype->defaultHeight);
      info.fps_n = pin_mediatype->defaultFPS;
      info.fps_d = 1;
      info.par_n = 1;
      info.par_d = 1;
      info.interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; /* XXX is this correct ? */
      mediacaps = gst_video_info_to_caps (&info);
    }

    if (mediacaps) {
      if (pins_mediatypes != NULL) {
        *pins_mediatypes = g_list_append (*pins_mediatypes, pin_mediatype);
      }
      gst_caps_append (caps, mediacaps);
    } else {
      /* failed to convert dshow caps */
      gst_dshow_free_pin_mediatype (pin_mediatype);
    }
  }

  enum_mediatypes->Release ();

  if (caps && gst_caps_is_empty (caps)) {
    gst_caps_unref (caps);
    caps = NULL;
  }

  return caps;
}
Example #6
static gboolean
gst_raw_video_parse_get_caps_from_config (GstRawBaseParse * raw_base_parse,
    GstRawBaseParseConfig config, GstCaps ** caps)
{
  GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
  GstRawVideoParseConfig *config_ptr =
      gst_raw_video_parse_get_config_ptr (raw_video_parse, config);

  g_assert (caps != NULL);

  *caps = gst_video_info_to_caps (&(config_ptr->info));

  return *caps != NULL;
}
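
Example #6 checks the return value of gst_video_info_to_caps(), which is NULL when the info has not been filled in. The inverse call, gst_video_info_from_caps(), appears in several of the other examples; a small hedged sketch of the round trip between the two:

#include <gst/video/video.h>

/* Returns TRUE if fixed raw-video caps survive a round trip through
 * GstVideoInfo unchanged. Assumes the caps are fixed; non-fixed or
 * non-raw caps make gst_video_info_from_caps() return FALSE. */
static gboolean
caps_info_roundtrip (const GstCaps * caps)
{
  GstVideoInfo info;
  GstCaps *out;
  gboolean equal;

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  out = gst_video_info_to_caps (&info);
  if (out == NULL)
    return FALSE;

  equal = gst_caps_is_equal (caps, out);
  gst_caps_unref (out);
  return equal;
}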
Example #7
static GstCaps *
gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter)
{
  GstOpenni2Src *ni2src;
  GstCaps *caps;
  GstVideoInfo info;
  GstVideoFormat format;

  ni2src = GST_OPENNI2_SRC (src);

  GST_OBJECT_LOCK (ni2src);
  if (ni2src->gst_caps)
    goto out;

  // If we are here, we need to compose the caps and return them.

  if (ni2src->depth->isValid () && ni2src->color->isValid () &&
      ni2src->sourcetype == SOURCETYPE_BOTH
      && ni2src->colorpixfmt == openni::PIXEL_FORMAT_RGB888) {
    format = GST_VIDEO_FORMAT_RGBA;
  } else if (ni2src->depth->isValid () &&
             ni2src->sourcetype == SOURCETYPE_DEPTH) {
    format = GST_VIDEO_FORMAT_GRAY16_LE;
  } else if (ni2src->color->isValid () && ni2src->sourcetype == SOURCETYPE_COLOR
      && ni2src->colorpixfmt == openni::PIXEL_FORMAT_RGB888) {
    format = GST_VIDEO_FORMAT_RGB;
  } else {
    goto out;
  }

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, format, ni2src->width, ni2src->height);
  info.fps_n = ni2src->fps;
  info.fps_d = 1;
  caps = gst_video_info_to_caps (&info);

  GST_INFO_OBJECT (ni2src, "probed caps: %" GST_PTR_FORMAT, caps);
  ni2src->gst_caps = caps;

out:
  GST_OBJECT_UNLOCK (ni2src);

  if (!ni2src->gst_caps)
    return gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (ni2src));

  return (filter)
      ? gst_caps_intersect_full (filter, ni2src->gst_caps,
      GST_CAPS_INTERSECT_FIRST)
      : gst_caps_ref (ni2src->gst_caps);
}
Example #8
static gboolean
theora_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstTheoraDec *dec = GST_THEORA_DEC (decoder);
  GstVideoCodecState *state;
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  state = gst_video_decoder_get_output_state (decoder);

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  dec->can_crop = FALSE;
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    dec->can_crop =
        gst_query_find_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE,
        NULL);
  }

  if (dec->can_crop) {
    GstVideoInfo info = state->info;
    GstCaps *caps;

    /* Calculate uncropped size */
    gst_video_info_set_format (&info, info.finfo->format, dec->info.frame_width,
        dec->info.frame_height);
    size = MAX (size, info.size);
    caps = gst_video_info_to_caps (&info);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_caps_unref (caps);
  }

  gst_buffer_pool_set_config (pool, config);

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_object_unref (pool);
  gst_video_codec_state_unref (state);

  return TRUE;
}
Example #9
static gboolean
gst_vtdec_negotiate_downstream (GstVTDec * self)
{
  gboolean result;
  GstCaps *caps;

  if (!gst_pad_check_reconfigure (self->srcpad))
    return TRUE;

  caps = gst_video_info_to_caps (&self->vinfo);
  result = gst_pad_set_caps (self->srcpad, caps);
  gst_caps_unref (caps);

  return result;
}
Example #10
static gboolean
gst_mfc_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMFCDec *self = GST_MFC_DEC (decoder);
  GstBufferPool *pool;
  GstStructure *config;
  guint size, min, max;

  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  g_assert (gst_query_get_n_allocation_pools (query) > 0);
  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  g_assert (pool != NULL);

  self->has_cropping = FALSE;
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    self->has_cropping =
        gst_query_find_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE,
        NULL);
  }

  if (self->has_cropping) {
    GstVideoInfo info;
    GstCaps *caps;

    /* Calculate uncropped size */
    gst_buffer_pool_config_get_params (config, &caps, &size, &min, &max);
    gst_video_info_from_caps (&info, caps);
    gst_video_info_set_format (&info, self->format, self->width, self->height);
    size = MAX (size, info.size);
    caps = gst_video_info_to_caps (&info);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_caps_unref (caps);
  }

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_buffer_pool_set_config (pool, config);
  gst_object_unref (pool);

  return TRUE;
}
Example #11
static gboolean
gst_vtdec_negotiate_output_format (GstVtdec * vtdec,
    GstVideoCodecState * input_state)
{
  GstCaps *caps = NULL, *peercaps = NULL, *templcaps;
  GstVideoFormat output_format;
  GstVideoCodecState *output_state = NULL;
  GstCapsFeatures *features;
  GstStructure *structure;
  const gchar *s;

  peercaps = gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec), NULL);

  /* Check if output supports GL caps by preference */
  templcaps = gst_pad_get_pad_template_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec));
  caps =
      gst_caps_intersect_full (templcaps, peercaps, GST_CAPS_INTERSECT_FIRST);

  gst_caps_unref (peercaps);
  gst_caps_unref (templcaps);

  caps = gst_caps_truncate (caps);
  structure = gst_caps_get_structure (caps, 0);
  s = gst_structure_get_string (structure, "format");
  output_format = gst_video_format_from_string (s);
  features = gst_caps_features_copy (gst_caps_get_features (caps, 0));

  gst_caps_unref (caps);

  if (!gst_vtdec_create_session (vtdec, output_format)) {
    gst_caps_features_free (features);
    return FALSE;
  }

  output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (vtdec),
      output_format, vtdec->video_info.width, vtdec->video_info.height,
      input_state);

  output_state->caps = gst_video_info_to_caps (&output_state->info);
  gst_caps_set_features (output_state->caps, 0, features);

  return TRUE;
}
Example #12
static GstCaps *
gst_euresys_get_camera_caps (GstEuresys * src)
{
  INT32 colorFormat;
  GstVideoFormat videoFormat;
  GstCaps *caps;
  GstVideoInfo vinfo;
  gint32 width, height;
  int status;

  g_assert (src->hChannel != 0);

  status = McGetParamInt (src->hChannel, MC_ColorFormat, &colorFormat);
  status |= McGetParamInt (src->hChannel, MC_ImageSizeX, &width);
  status |= McGetParamInt (src->hChannel, MC_ImageSizeY, &height);
  if (status != MC_OK) {
    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS,
        (("Failed to get color format, width, and/or height.")), (NULL));
    return NULL;
  }

  videoFormat = gst_euresys_color_format_to_video_format (colorFormat);
  if (videoFormat == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_ELEMENT_ERROR (src, STREAM, WRONG_TYPE,
        (("Unknown or unsupported color format.")), (NULL));
    return NULL;
  }

  gst_video_info_init (&vinfo);
  gst_video_info_set_format (&vinfo, videoFormat, width, height);
  vinfo.fps_n = 30;
  vinfo.fps_d = 1;
  caps = gst_video_info_to_caps (&vinfo);

  if (caps == NULL) {
    GST_ELEMENT_ERROR (src, STREAM, TOO_LAZY,
        (("Failed to generate caps from video format.")), (NULL));
    return NULL;
  }

  return caps;
}
Example #13
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
{
  GList *l;
  gint best_width = -1, best_height = -1;
  GstVideoInfo info;
  GstCaps *ret = NULL;

  gst_video_info_from_caps (&info, caps);

  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *vaggpad = l->data;
    GstCompositorPad *compositor_pad = GST_COMPOSITOR_PAD (vaggpad);
    gint this_width, this_height;
    gint width, height;

    width = GST_VIDEO_INFO_WIDTH (&vaggpad->info);
    height = GST_VIDEO_INFO_HEIGHT (&vaggpad->info);

    if (width == 0 || height == 0)
      continue;

    this_width = width + MAX (compositor_pad->xpos, 0);
    this_height = height + MAX (compositor_pad->ypos, 0);

    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;
  }
  GST_OBJECT_UNLOCK (vagg);

  if (best_width > 0 && best_height > 0) {
    info.width = best_width;
    info.height = best_height;
    if (set_functions (GST_COMPOSITOR (vagg), &info))
      ret = gst_video_info_to_caps (&info);
  }

  return ret;
}
Example #14
static gboolean
gst_gl_mixer_query_caps (GstPad * pad, GstAggregator * agg, GstQuery * query)
{
  GstCaps *filter, *caps;
  GstStructure *s;
  gint n;

  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);

  gst_query_parse_caps (query, &filter);

  if (GST_VIDEO_INFO_FORMAT (&vagg->info) != GST_VIDEO_FORMAT_UNKNOWN) {
    caps = gst_video_info_to_caps (&vagg->info);
  } else {
    caps = gst_pad_get_pad_template_caps (agg->srcpad);
  }

  caps = gst_caps_make_writable (caps);

  n = gst_caps_get_size (caps) - 1;
  for (; n >= 0; n--) {
    s = gst_caps_get_structure (caps, n);
    gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
        "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
    if (GST_VIDEO_INFO_FPS_D (&vagg->info) != 0) {
      gst_structure_set (s,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    }
  }

  if (filter)
    caps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);

  gst_query_set_caps_result (query, caps);
  gst_caps_unref (caps);

  return TRUE;
}
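
Example #14 widens the fixed caps produced by gst_video_info_to_caps() into ranges so the query answer is not over-constrained. That loop can be read in isolation as a helper; a sketch under the assumption that every structure in the caps is raw video (the helper name is made up for illustration):

#include <gst/gst.h>

/* Hypothetical helper: relax width, height and framerate of raw
 * video caps into full ranges, as gst_gl_mixer_query_caps() does
 * inline above. Takes ownership of the passed caps. */
static GstCaps *
relax_video_caps (GstCaps * caps)
{
  guint i;

  caps = gst_caps_make_writable (caps);
  for (i = 0; i < gst_caps_get_size (caps); i++) {
    GstStructure *s = gst_caps_get_structure (caps, i);

    gst_structure_set (s,
        "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
        "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
  }
  return caps;
}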
Example #15
static GstCaps *
gst_video_parse_get_caps (GstRawParse * rp)
{
  GstVideoParse *vp = GST_VIDEO_PARSE (rp);
  GstVideoInfo info;
  GstCaps *caps;
  gint fps_n, fps_d;

  gst_raw_parse_get_fps (rp, &fps_n, &fps_d);

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, vp->format, vp->width, vp->height);
  info.fps_n = fps_n;
  info.fps_d = fps_d;
  info.par_n = vp->par_n;
  info.par_d = vp->par_d;
  info.interlace_mode = vp->interlaced ?
      GST_VIDEO_INTERLACE_MODE_INTERLEAVED :
      GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;

  caps = gst_video_info_to_caps (&info);

  return caps;
}
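
Example #15 (like #4 and #7) assigns fps, pixel-aspect-ratio and interlace mode through the struct fields directly. The GST_VIDEO_INFO_* accessor macros express the same assignments; a sketch with made-up PAL-style numbers:

#include <gst/video/video.h>

static GstCaps *
make_pal_interlaced_caps (void)
{
  GstVideoInfo info;

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_YUY2, 720, 576);

  /* Same effect as writing info.fps_n etc. directly */
  GST_VIDEO_INFO_FPS_N (&info) = 25;
  GST_VIDEO_INFO_FPS_D (&info) = 1;
  GST_VIDEO_INFO_PAR_N (&info) = 12;
  GST_VIDEO_INFO_PAR_D (&info) = 11;
  GST_VIDEO_INFO_INTERLACE_MODE (&info) =
      GST_VIDEO_INTERLACE_MODE_INTERLEAVED;

  return gst_video_info_to_caps (&info);
}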
Example #16
static GstFlowReturn
gst_y4m_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstY4mDec *y4mdec;
  int n_avail;
  GstFlowReturn flow_ret = GST_FLOW_OK;
#define MAX_HEADER_LENGTH 80
  char header[MAX_HEADER_LENGTH];
  int i;
  int len;

  y4mdec = GST_Y4M_DEC (parent);

  GST_DEBUG_OBJECT (y4mdec, "chain");

  if (GST_BUFFER_IS_DISCONT (buffer)) {
    GST_DEBUG ("got discont");
    gst_adapter_clear (y4mdec->adapter);
  }

  gst_adapter_push (y4mdec->adapter, buffer);
  n_avail = gst_adapter_available (y4mdec->adapter);

  if (!y4mdec->have_header) {
    gboolean ret;
    GstCaps *caps;

    if (n_avail < MAX_HEADER_LENGTH)
      return GST_FLOW_OK;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);

    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }

    ret = gst_y4m_dec_parse_header (y4mdec, header);
    if (!ret) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG header"), (NULL));
      return GST_FLOW_ERROR;
    }

    y4mdec->header_size = strlen (header) + 1;
    gst_adapter_flush (y4mdec->adapter, y4mdec->header_size);

    caps = gst_video_info_to_caps (&y4mdec->info);
    ret = gst_pad_set_caps (y4mdec->srcpad, caps);
    gst_caps_unref (caps);
    if (!ret) {
      GST_DEBUG_OBJECT (y4mdec, "Couldn't set caps on src pad");
      return GST_FLOW_ERROR;
    }

    y4mdec->have_header = TRUE;
  }

  if (y4mdec->have_new_segment) {
    GstEvent *event;
    GstClockTime start = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.start);
    GstClockTime stop = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.stop);
    GstClockTime time = gst_y4m_dec_bytes_to_timestamp (y4mdec,
        y4mdec->segment.time);
    GstSegment seg;

    gst_segment_init (&seg, GST_FORMAT_TIME);
    seg.start = start;
    seg.stop = stop;
    seg.time = time;
    event = gst_event_new_segment (&seg);

    gst_pad_push_event (y4mdec->srcpad, event);
    //gst_event_unref (event);

    y4mdec->have_new_segment = FALSE;
    y4mdec->frame_index = gst_y4m_dec_bytes_to_frames (y4mdec,
        y4mdec->segment.time);
    GST_DEBUG ("new frame_index %d", y4mdec->frame_index);

  }

  while (1) {
    n_avail = gst_adapter_available (y4mdec->adapter);
    if (n_avail < MAX_HEADER_LENGTH)
      break;

    gst_adapter_copy (y4mdec->adapter, (guint8 *) header, 0, MAX_HEADER_LENGTH);
    header[MAX_HEADER_LENGTH - 1] = 0;
    for (i = 0; i < MAX_HEADER_LENGTH; i++) {
      if (header[i] == 0x0a)
        header[i] = 0;
    }
    if (memcmp (header, "FRAME", 5) != 0) {
      GST_ELEMENT_ERROR (y4mdec, STREAM, DECODE,
          ("Failed to parse YUV4MPEG frame"), (NULL));
      flow_ret = GST_FLOW_ERROR;
      break;
    }

    len = strlen (header);
    if (n_avail < y4mdec->info.size + len + 1) {
      /* not enough data */
      GST_DEBUG ("not enough data for frame %d < %" G_GSIZE_FORMAT,
          n_avail, y4mdec->info.size + len + 1);
      break;
    }

    gst_adapter_flush (y4mdec->adapter, len + 1);

    buffer = gst_adapter_take_buffer (y4mdec->adapter, y4mdec->info.size);

    GST_BUFFER_TIMESTAMP (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index);
    GST_BUFFER_DURATION (buffer) =
        gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) -
        GST_BUFFER_TIMESTAMP (buffer);

    y4mdec->frame_index++;

    flow_ret = gst_pad_push (y4mdec->srcpad, buffer);
    if (flow_ret != GST_FLOW_OK)
      break;
  }

  GST_DEBUG ("returning %d", flow_ret);

  return flow_ret;
}
Example #17
static gboolean
gst_vtdec_negotiate (GstVideoDecoder * decoder)
{
  GstVideoCodecState *output_state = NULL;
  GstCaps *peercaps = NULL, *caps = NULL, *templcaps = NULL, *prevcaps = NULL;
  GstVideoFormat format;
  GstStructure *structure;
  const gchar *s;
  GstVtdec *vtdec;
  OSStatus err = noErr;
  GstCapsFeatures *features = NULL;
  gboolean output_textures = FALSE;     /* stays FALSE when the caps carry no features */

  vtdec = GST_VTDEC (decoder);
  if (vtdec->session)
    gst_vtdec_push_frames_if_needed (vtdec, TRUE, FALSE);

  output_state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (vtdec));
  if (output_state) {
    prevcaps = gst_caps_ref (output_state->caps);
    gst_video_codec_state_unref (output_state);
  }

  peercaps = gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec), NULL);
  if (prevcaps && gst_caps_can_intersect (prevcaps, peercaps)) {
    /* The hardware decoder can become (temporarily) unavailable across
     * VTDecompressionSessionCreate/Destroy calls. So if the currently configured
     * caps are still accepted by downstream we keep them so we don't have to
     * destroy and recreate the session.
     */
    GST_INFO_OBJECT (vtdec,
        "current and peer caps are compatible, keeping current caps");
    caps = gst_caps_ref (prevcaps);
  } else {
    templcaps =
        gst_pad_get_pad_template_caps (GST_VIDEO_DECODER_SRC_PAD (decoder));
    caps =
        gst_caps_intersect_full (peercaps, templcaps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (templcaps);
  }
  gst_caps_unref (peercaps);

  caps = gst_caps_truncate (gst_caps_make_writable (caps));
  structure = gst_caps_get_structure (caps, 0);
  s = gst_structure_get_string (structure, "format");
  format = gst_video_format_from_string (s);
  features = gst_caps_get_features (caps, 0);
  if (features)
    features = gst_caps_features_copy (features);

  output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (vtdec),
      format, vtdec->video_info.width, vtdec->video_info.height,
      vtdec->input_state);
  output_state->caps = gst_video_info_to_caps (&output_state->info);
  if (features) {
    gst_caps_set_features (output_state->caps, 0, features);
    output_textures =
        gst_caps_features_contains (features,
        GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
    if (output_textures)
      gst_caps_set_simple (output_state->caps, "texture-target", G_TYPE_STRING,
#if !HAVE_IOS
          GST_GL_TEXTURE_TARGET_RECTANGLE_STR,
#else
          GST_GL_TEXTURE_TARGET_2D_STR,
#endif
          NULL);
  }
  gst_caps_unref (caps);

  if (!prevcaps || !gst_caps_is_equal (prevcaps, output_state->caps)) {
    gboolean renegotiating = vtdec->session != NULL;

    GST_INFO_OBJECT (vtdec,
        "negotiated output format %" GST_PTR_FORMAT " previous %"
        GST_PTR_FORMAT, output_state->caps, prevcaps);

    if (vtdec->session)
      gst_vtdec_invalidate_session (vtdec);

    err = gst_vtdec_create_session (vtdec, format, TRUE);
    if (err == noErr) {
      GST_INFO_OBJECT (vtdec, "using hardware decoder");
    } else if (err == kVTVideoDecoderNotAvailableNowErr && renegotiating) {
      GST_WARNING_OBJECT (vtdec, "hw decoder not available anymore");
      err = gst_vtdec_create_session (vtdec, format, FALSE);
    }

    if (err != noErr) {
      GST_ELEMENT_ERROR (vtdec, RESOURCE, FAILED, (NULL),
          ("VTDecompressionSessionCreate returned %d", (int) err));
    }
  }

  if (vtdec->texture_cache != NULL && !output_textures) {
    gst_video_texture_cache_free (vtdec->texture_cache);
    vtdec->texture_cache = NULL;
  }

  if (err == noErr && output_textures) {
    /* call this regardless of whether caps have changed or not since a new
     * local context could have become available
     */
    gst_gl_context_helper_ensure_context (vtdec->ctxh);

    GST_INFO_OBJECT (vtdec, "pushing textures, context %p old context %p",
        vtdec->ctxh->context,
        vtdec->texture_cache ? vtdec->texture_cache->ctx : NULL);

    if (vtdec->texture_cache
        && vtdec->texture_cache->ctx != vtdec->ctxh->context) {
      gst_video_texture_cache_free (vtdec->texture_cache);
      vtdec->texture_cache = NULL;
    }
    if (!vtdec->texture_cache)
      setup_texture_cache (vtdec, vtdec->ctxh->context);
  }

  if (prevcaps)
    gst_caps_unref (prevcaps);

  if (err != noErr)
    return FALSE;

  return GST_VIDEO_DECODER_CLASS (gst_vtdec_parent_class)->negotiate (decoder);
}
Example #18
/* Test callbacks and vmethods */
static GstPipeline *
create_pipeline (InsanityGstPipelineTest * ptest, gpointer unused_data)
{
  GstCaps *caps;
  gulong probe_id;
  GError *err = NULL;
  GstIterator *it = NULL;
  gchar *uri = NULL, *sublocation = NULL;
  GstElement *capsfilter = NULL, *capsfilter1 = NULL, *colorspace =
      NULL, *colorspace1 = NULL, *fakesink = NULL;
  GstPad *fakesinksink = NULL, *tmppad = NULL;

  InsanityTest *test = INSANITY_TEST (ptest);

  SUBTITLES_TEST_LOCK ();
  glob_pipeline = GST_ELEMENT (gst_pipeline_new ("pipeline"));

  /* Create the source */
  insanity_test_get_boolean_argument (test, "push-mode",
      (gboolean *) & glob_push_mode);

  insanity_test_get_string_argument (test, "sublocation", &sublocation);
  if (sublocation == NULL || g_strcmp0 (sublocation, "") == 0) {
    ERROR (test, "Location name not set\n");
    goto creation_failed;
  }

  uri = gst_filename_to_uri (sublocation, &err);
  if (err != NULL) {
    ERROR (test, "Error creating uri %s", err->message);

    goto creation_failed;
  }
  if (glob_push_mode == TRUE) {
    gchar *tmpuri;

    /* pushfilesrc handles push<scheme>:// URIs, so select it by
     * prefixing the uri rather than instantiating the element here */
    tmpuri = g_strconcat ("push", uri, NULL);
    g_free (uri);

    uri = tmpuri;
  }

  glob_uridecodebin = gst_element_factory_make ("uridecodebin", "src");
  g_signal_connect (glob_uridecodebin, "pad-added", G_CALLBACK (pad_added_cb),
      test);
  g_object_set (glob_uridecodebin, "uri", uri, NULL);

  /* the subtitleoverlay */
  glob_suboverlay =
      gst_element_factory_make ("subtitleoverlay", "subtitleoverlay");
  if (glob_suboverlay == NULL)
    goto creation_failed;

  /* the fakesink */
  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  if (fakesink == NULL)
    goto creation_failed;

  /* and the videotestsrc */
  glob_videotestsrc = gst_element_factory_make ("videotestsrc", "videotestsrc");
  if (glob_videotestsrc == NULL)
    goto creation_failed;
  g_object_set (glob_videotestsrc, "pattern", 2, "do-timestamp", TRUE, NULL);

  /* Make sure the video is big enough */
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  if (capsfilter == NULL)
    goto creation_failed;

  gst_video_info_init (&glob_video_info);
  gst_video_info_set_format (&glob_video_info, GST_VIDEO_FORMAT_RGB, 1920,
      1080);
  caps = gst_video_info_to_caps (&glob_video_info);

  g_object_set (capsfilter, "caps", caps, NULL);

  capsfilter1 = gst_element_factory_make ("capsfilter", NULL);
  if (capsfilter1 == NULL)
    goto creation_failed;

  /* We want the last frame that we will "parse" to check if it contains
   * subtitles to be in RGB to make simpler for us */
  g_object_set (capsfilter1, "caps", caps, NULL);

  colorspace = gst_element_factory_make ("videoconvert", NULL);
  if (colorspace == NULL)
    goto creation_failed;

  colorspace1 = gst_element_factory_make ("videoconvert", NULL);
  if (colorspace1 == NULL)
    goto creation_failed;

  /* Now add to the pipeline */
  gst_bin_add_many (GST_BIN (glob_pipeline), glob_uridecodebin,
      glob_videotestsrc, capsfilter, glob_suboverlay,
      capsfilter1, colorspace, colorspace1, fakesink, NULL);

  /* link video branch elements */
  gst_element_link_many (glob_videotestsrc, capsfilter,
      glob_suboverlay, colorspace, capsfilter1, fakesink, NULL);

  /* And install a probe to the subtitleoverlay src pad */
  fakesinksink = gst_element_get_static_pad (fakesink, "sink");
  if (fakesinksink == NULL)
    goto failed;

  if (insanity_gst_test_add_data_probe (INSANITY_GST_TEST (test),
          GST_BIN (glob_pipeline), GST_OBJECT_NAME (fakesink),
          GST_OBJECT_NAME (fakesinksink), &tmppad, &probe_id,
          &probe_cb, NULL, NULL) == TRUE) {

    glob_suboverlay_src_probe = g_slice_new0 (ProbeContext);
    glob_suboverlay_src_probe->probe_id = probe_id;
    glob_suboverlay_src_probe->pad = fakesinksink;
    glob_suboverlay_src_probe->element = fakesink;
    glob_suboverlay_src_probe->test = test;
    glob_suboverlay_src_probe->waiting_first_segment = TRUE;

    insanity_test_validate_checklist_item (test, "install-probes", TRUE, NULL);
  } else {
    insanity_test_validate_checklist_item (test,
        "install-probes", FALSE, "Failed to attach probe to fakesink");
    insanity_test_done (test);
    goto failed;
  }

  g_signal_connect (GST_CHILD_PROXY (glob_suboverlay), "child-added",
      G_CALLBACK (suboverlay_child_added_cb), test);

done:
  SUBTITLES_TEST_UNLOCK ();

  g_free (uri);
  g_free (sublocation);
  if (err != NULL)
    g_error_free (err);
  if (it != NULL)
    gst_iterator_free (it);

  return GST_PIPELINE (glob_pipeline);

failed:
  if (glob_pipeline != NULL)
    gst_object_unref (glob_pipeline);

  glob_suboverlay = glob_pipeline = glob_videotestsrc =
      glob_uridecodebin = NULL;
  goto done;

creation_failed:
  if (glob_uridecodebin != NULL)
    gst_object_unref (glob_uridecodebin);
  if (glob_suboverlay != NULL)
    gst_object_unref (glob_suboverlay);
  if (glob_videotestsrc != NULL)
    gst_object_unref (glob_videotestsrc);
  if (fakesink != NULL)
    gst_object_unref (fakesink);

  goto failed;
}
Example #19
static gboolean
gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
  GstPad *const srcpad = GST_VIDEO_DECODER_SRC_PAD (vdec);
  GstCaps *allowed;
  GstVideoCodecState *state, *ref_state;
  GstVaapiCapsFeature feature;
  GstCapsFeatures *features;
  GstCaps *allocation_caps;
  GstVideoInfo *vi;
  GstVideoFormat format;
  GstClockTime latency;
  gint fps_d, fps_n;
  guint width, height;
  const gchar *format_str, *feature_str;

  if (!decode->input_state)
    return FALSE;

  ref_state = decode->input_state;

  format = GST_VIDEO_INFO_FORMAT (&decode->decoded_info);
  allowed = gst_vaapidecode_get_allowed_srcpad_caps (decode);
  feature = gst_vaapi_find_preferred_caps_feature (srcpad, allowed, &format);
  gst_caps_unref (allowed);

  if (feature == GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED)
    return FALSE;

#if (!USE_GLX && !USE_EGL)
  /* This is a very pathological situation. Should not happen. */
  if (feature == GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META)
    return FALSE;
#endif

  if ((feature == GST_VAAPI_CAPS_FEATURE_SYSTEM_MEMORY ||
          feature == GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE)
      && format != GST_VIDEO_INFO_FORMAT (&decode->decoded_info)) {
    GST_FIXME_OBJECT (decode, "validate if driver can convert from %s to %s",
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT
            (&decode->decoded_info)), gst_video_format_to_string (format));
  }

  width = decode->display_width;
  height = decode->display_height;

  if (!width || !height) {
    width = GST_VIDEO_INFO_WIDTH (&ref_state->info);
    height = GST_VIDEO_INFO_HEIGHT (&ref_state->info);
  }

  state = gst_video_decoder_set_output_state (vdec, format, width, height,
      ref_state);
  if (!state)
    return FALSE;

  if (GST_VIDEO_INFO_WIDTH (&state->info) == 0
      || GST_VIDEO_INFO_HEIGHT (&state->info) == 0) {
    gst_video_codec_state_unref (state);
    return FALSE;
  }

  vi = &state->info;
  state->caps = gst_video_info_to_caps (vi);

  switch (feature) {
    case GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META:
    case GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE:{
      GstStructure *structure = gst_caps_get_structure (state->caps, 0);

      /* Remove chroma-site and colorimetry from src caps,
       * which is unnecessary on downstream if using VASurface
       */
      gst_structure_remove_fields (structure, "chroma-site", "colorimetry",
          NULL);

      feature_str = gst_vaapi_caps_feature_to_string (feature);
      features = gst_caps_features_new (feature_str, NULL);
      gst_caps_set_features (state->caps, 0, features);
      break;
    }
    default:
      break;
  }

  /* Allocation query is different from pad's caps */
  allocation_caps = NULL;
  if (GST_VIDEO_INFO_WIDTH (&decode->decoded_info) != width
      || GST_VIDEO_INFO_HEIGHT (&decode->decoded_info) != height) {
    allocation_caps = gst_caps_copy (state->caps);
    format_str = gst_video_format_to_string (format);
    gst_caps_set_simple (allocation_caps,
        "width", G_TYPE_INT, GST_VIDEO_INFO_WIDTH (&decode->decoded_info),
        "height", G_TYPE_INT, GST_VIDEO_INFO_HEIGHT (&decode->decoded_info),
        "format", G_TYPE_STRING, format_str, NULL);
    GST_INFO_OBJECT (decode, "new alloc caps = %" GST_PTR_FORMAT,
        allocation_caps);
  }
  gst_caps_replace (&state->allocation_caps, allocation_caps);
  if (allocation_caps)
    gst_caps_unref (allocation_caps);

  GST_INFO_OBJECT (decode, "new src caps = %" GST_PTR_FORMAT, state->caps);
  gst_caps_replace (&decode->srcpad_caps, state->caps);
  gst_video_codec_state_unref (state);

  fps_n = GST_VIDEO_INFO_FPS_N (vi);
  fps_d = GST_VIDEO_INFO_FPS_D (vi);
  if (fps_n <= 0 || fps_d <= 0) {
    GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation");
    fps_n = 25;
    fps_d = 1;
  }

  /* For parsing/preparation purposes we'd need at least 1 frame
   * latency in general, with perfectly known unit boundaries (NALU,
   * AU), and up to 2 frames when we need to wait for the second frame
   * start to determine the first frame is complete */
  latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
  gst_video_decoder_set_latency (vdec, latency, latency);

  return TRUE;
}
Example #20
static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(priv->bufferMutex);

    if (priv->unlocked) {
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_OK;
    }

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Ignore buffers if the video is already in fullscreen using
    // another sink.
    if (priv->gstGWorld->isFullscreen()) {
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_OK;
    }
#endif

    priv->buffer = gst_buffer_ref(buffer);

#ifndef GST_API_VERSION_1
    // For the unlikely case where the buffer has no caps, the caps
    // are implicitely the caps of the pad. This shouldn't happen.
    if (UNLIKELY(!GST_BUFFER_CAPS(buffer))) {
        buffer = priv->buffer = gst_buffer_make_metadata_writable(priv->buffer);
        gst_buffer_set_caps(priv->buffer, GST_PAD_CAPS(GST_BASE_SINK_PAD(baseSink)));
    }

    GRefPtr<GstCaps> caps = GST_BUFFER_CAPS(buffer);
#else
    GRefPtr<GstCaps> caps;
    // The video info structure is valid only if the sink handled an allocation query.
    if (GST_VIDEO_INFO_FORMAT(&priv->info) != GST_VIDEO_FORMAT_UNKNOWN)
        caps = adoptGRef(gst_video_info_to_caps(&priv->info));
    else
        caps = priv->currentCaps;
#endif

    GstVideoFormat format;
    WebCore::IntSize size;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), size, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride)) {
        gst_buffer_unref(buffer);
        g_mutex_unlock(priv->bufferMutex);
        return GST_FLOW_ERROR;
    }

    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference in the
        // method scope we can't use gst_buffer_make_writable() here. Also
        // The buffer content should not be changed here because the same buffer
        // could be passed multiple times to this method (in theory).

        GstBuffer* newBuffer = createGstBuffer(buffer);

        // Check if allocation failed.
        if (UNLIKELY(!newBuffer)) {
            g_mutex_unlock(priv->bufferMutex);
            return GST_FLOW_ERROR;
        }

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // For 720p/PAL for example this means 1280*720*25=23040000
        // function calls per second!
#ifndef GST_API_VERSION_1
        const guint8* source = GST_BUFFER_DATA(buffer);
        guint8* destination = GST_BUFFER_DATA(newBuffer);
#else
        GstMapInfo sourceInfo;
        GstMapInfo destinationInfo;
        gst_buffer_map(buffer, &sourceInfo, GST_MAP_READ);
        const guint8* source = const_cast<guint8*>(sourceInfo.data);
        gst_buffer_map(newBuffer, &destinationInfo, GST_MAP_WRITE);
        guint8* destination = static_cast<guint8*>(destinationInfo.data);
#endif

        for (int x = 0; x < size.height(); x++) {
            for (int y = 0; y < size.width(); y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

#ifdef GST_API_VERSION_1
        gst_buffer_unmap(buffer, &sourceInfo);
        gst_buffer_unmap(newBuffer, &destinationInfo);
#endif
        gst_buffer_unref(buffer);
        buffer = priv->buffer = newBuffer;
    }

    // This should likely use a lower priority, but glib currently starves
    // lower priority sources.
    // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
    priv->timeoutId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, webkitVideoSinkTimeoutCallback,
                                          gst_object_ref(sink), reinterpret_cast<GDestroyNotify>(gst_object_unref));

    g_cond_wait(priv->dataCondition, priv->bufferMutex);
    g_mutex_unlock(priv->bufferMutex);
    return GST_FLOW_OK;
}
Example #21
static gboolean
gst_genicamsrc_start (GstBaseSrc * bsrc)
{
  GstGenicamSrc *src = GST_GENICAM_SRC (bsrc);
  GC_ERROR ret;
  uint32_t i, num_ifaces, num_devs;
  guint32 width, height, bpp, stride;
  GstVideoInfo vinfo;

  GST_DEBUG_OBJECT (src, "start");

  /* bind functions from CTI */
  if (!gst_genicamsrc_bind_functions (src)) {
    GST_ELEMENT_ERROR (src, LIBRARY, INIT,
        ("GenTL CTI could not be opened: %s", g_module_error ()), (NULL));
    return FALSE;
  }

  /* initialize library and print info */
  ret = GTL_GCInitLib ();
  HANDLE_GTL_ERROR ("GenTL Producer library could not be initialized");

  gst_genicam_print_gentl_impl_info (src);

  /* open GenTL, print info, and update interface list */
  ret = GTL_TLOpen (&src->hTL);
  HANDLE_GTL_ERROR ("System module failed to open");

  gst_genicam_print_system_info (src);

  ret = GTL_TLUpdateInterfaceList (src->hTL, NULL, src->timeout);
  HANDLE_GTL_ERROR ("Failed to update interface list within timeout");

  /* print info for all interfaces and open specified interface */
  ret = GTL_TLGetNumInterfaces (src->hTL, &num_ifaces);
  HANDLE_GTL_ERROR ("Failed to get number of interfaces");
  if (num_ifaces > 0) {
    GST_DEBUG_OBJECT (src, "Found %dGenTL interfaces", num_ifaces);
    for (i = 0; i < num_ifaces; ++i) {
      gst_genicam_print_interface_info (src, i);
    }
  } else {
    GST_ELEMENT_ERROR (src, LIBRARY, FAILED, ("No interfaces found"), (NULL));
    goto error;
  }

  if (!src->interface_id || src->interface_id[0] == 0) {
    size_t id_size;
    GST_DEBUG_OBJECT (src, "Trying to find interface ID at index %d",
        src->interface_index);

    ret = GTL_TLGetInterfaceID (src->hTL, src->interface_index, NULL, &id_size);
    HANDLE_GTL_ERROR ("Failed to get interface ID at specified index");
    if (src->interface_id) {
      g_free (src->interface_id);
    }
    src->interface_id = (gchar *) g_malloc (id_size);
    ret =
        GTL_TLGetInterfaceID (src->hTL, src->interface_index, src->interface_id,
        &id_size);
    HANDLE_GTL_ERROR ("Failed to get interface ID at specified index");
  }

  GST_DEBUG_OBJECT (src, "Trying to open interface '%s'", src->interface_id);
  ret = GTL_TLOpenInterface (src->hTL, src->interface_id, &src->hIF);
  HANDLE_GTL_ERROR ("Interface module failed to open");

  ret = GTL_IFUpdateDeviceList (src->hIF, NULL, src->timeout);
  HANDLE_GTL_ERROR ("Failed to update device list within timeout");

  /* print info for all devices and open specified device */
  ret = GTL_IFGetNumDevices (src->hIF, &num_devs);
  HANDLE_GTL_ERROR ("Failed to get number of devices");
  if (num_devs > 0) {
    for (i = 0; i < num_devs; ++i) {
      gst_genicam_print_device_info (src, i);
    }
  } else {
    GST_ELEMENT_ERROR (src, LIBRARY, FAILED,
        ("No devices found on interface"), (NULL));
    goto error;
  }

  if (!src->device_id || src->device_id[0] == 0) {
    size_t id_size;
    GST_DEBUG_OBJECT (src, "Trying to find device ID at index %d",
        src->device_index);

    GTL_IFGetDeviceID (src->hIF, src->device_index, NULL, &id_size);
    HANDLE_GTL_ERROR ("Failed to get device ID at specified index");
    if (src->device_id) {
      g_free (src->device_id);
    }
    src->device_id = (gchar *) g_malloc (id_size);
    GTL_IFGetDeviceID (src->hIF, src->device_index, src->device_id, &id_size);
    HANDLE_GTL_ERROR ("Failed to get device ID at specified index");
  }

  GST_DEBUG_OBJECT (src, "Trying to open device '%s'", src->device_id);
  ret =
      GTL_IFOpenDevice (src->hIF, src->device_id, DEVICE_ACCESS_CONTROL,
      &src->hDEV);
  HANDLE_GTL_ERROR ("Failed to open device");

  /* find and open specified data stream id */
  if (!src->stream_id || src->stream_id[0] == 0) {
    size_t id_size;
    GST_DEBUG_OBJECT (src, "Trying to find stream ID at index %d",
        src->stream_index);

    GTL_DevGetDataStreamID (src->hDEV, src->stream_index, NULL, &id_size);
    HANDLE_GTL_ERROR ("Failed to get stream ID at specified index");
    if (src->stream_id) {
      g_free (src->stream_id);
    }
    src->stream_id = (gchar *) g_malloc (id_size);
    GTL_DevGetDataStreamID (src->hDEV, src->stream_index, src->stream_id,
        &id_size);
    HANDLE_GTL_ERROR ("Failed to get stream ID at specified index");
  }

  GST_DEBUG_OBJECT (src, "Trying to open data stream '%s'", src->stream_id);
  ret = GTL_DevOpenDataStream (src->hDEV, src->stream_id, &src->hDS);
  HANDLE_GTL_ERROR ("Failed to open data stream");

  {
    uint32_t num_urls = 0;
    char url[2048];
    size_t url_len = sizeof (url);
    INFO_DATATYPE datatype;
    const uint32_t url_index = 0;

    ret = GTL_DevGetPort (src->hDEV, &src->hDevPort);
    HANDLE_GTL_ERROR ("Failed to get port on device");
    ret = GTL_GCGetNumPortURLs (src->hDevPort, &num_urls);
    HANDLE_GTL_ERROR ("Failed to get number of port URLs");

    GST_DEBUG_OBJECT (src, "Found %d port URLs", num_urls);

    GST_DEBUG_OBJECT (src, "Trying to get URL index %d", url_index);
    GTL_GCGetPortURLInfo (src->hDevPort, url_index, URL_INFO_URL, &datatype,
        url, &url_len);
    HANDLE_GTL_ERROR ("Failed to get URL");
    GST_DEBUG_OBJECT (src, "Found URL '%s'", url);

    g_assert (url_len > 6);
    if (g_str_has_prefix (url, "file")) {
      GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
          ("file url not supported yet"), (NULL));
      goto error;
    } else if (g_str_has_prefix (url, "local")) {
      GError *err = NULL;
      GMatchInfo *matchInfo;
      GRegex *regex;
      gchar *filename, *addr_str, *len_str;
      uint64_t addr;
      size_t len;
      gchar *buf;

      regex =
          g_regex_new
          ("local:(?:///)?(?<filename>[^;]+);(?<address>[^;]+);(?<length>[^?]+)(?:[?]SchemaVersion=([^&]+))?",
          (GRegexCompileFlags) 0, (GRegexMatchFlags) 0, &err);
      if (!regex) {
        goto error;
      }
      g_regex_match (regex, url, (GRegexMatchFlags) 0, &matchInfo);
      filename = g_match_info_fetch_named (matchInfo, "filename");
      addr_str = g_match_info_fetch_named (matchInfo, "address");
      len_str = g_match_info_fetch_named (matchInfo, "length");
      if (!filename || !addr_str || !len_str) {
        GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
            ("Failed to parse local URL"), (NULL));
        goto error;
      }

      addr = g_ascii_strtoull (addr_str, NULL, 16);
      len = g_ascii_strtoull (len_str, NULL, 16);
      buf = (gchar *) g_malloc (len);
      GTL_GCReadPort (src->hDevPort, addr, buf, &len);
      HANDLE_GTL_ERROR ("Failed to read XML from port");

      if (g_str_has_suffix (filename, "zip")) {
        gchar *zipfilepath;
        unzFile uf;
        unz_file_info64 fileinfo;
        gchar xmlfilename[2048];
        gchar *xml;

        zipfilepath = g_build_filename (g_get_tmp_dir (), filename, NULL);
        if (!g_file_set_contents (zipfilepath, buf, len, &err)) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to write zipped XML to %s", zipfilepath), (NULL));
          goto error;
        }
        uf = unzOpen64 (zipfilepath);
        if (!uf) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to open zipped XML %s", zipfilepath), (NULL));
          goto error;
        }
        //ret = unzGetGlobalInfo64(uf, &gi);
        ret =
            unzGetCurrentFileInfo64 (uf, &fileinfo, xmlfilename,
            sizeof (xmlfilename), NULL, 0, NULL, 0);
        if (ret != UNZ_OK) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to query zip file %s", zipfilepath), (NULL));
          goto error;
        }

        ret = unzOpenCurrentFile (uf);
        if (ret != UNZ_OK) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to extract file %s", xmlfilename), (NULL));
          goto error;
        }

        xml = (gchar *) g_malloc (fileinfo.uncompressed_size);
        if (!xml) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to allocate memory to extract XML file"), (NULL));
          goto error;
        }

        ret = unzReadCurrentFile (uf, xml, fileinfo.uncompressed_size);
        if (ret != fileinfo.uncompressed_size) {
          GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
              ("Failed to extract XML file %s", xmlfilename), (NULL));
          goto error;
        }
        unzClose (uf);
        g_free (zipfilepath);

        zipfilepath = g_build_filename (g_get_tmp_dir (), xmlfilename, NULL);
        g_file_set_contents (zipfilepath, xml, fileinfo.uncompressed_size,
            &err);
        g_free (zipfilepath);

        g_free (xml);
        //GZlibDecompressor *decompress;
        //char *unzipped;
        //gsize outbuf_size, bytes_read, bytes_written;
        //GInputStream *zippedstream, *unzippedstream;
        //decompress = g_zlib_decompressor_new (G_ZLIB_COMPRESSOR_FORMAT_ZLIB);

        ////zippedstream = g_memory_input_stream_new_from_data(buf, len, g_free);
        ////unzippedstream = g_converter_input_stream_new (zippedstream, G_CONVERTER(decompress));
        ////g_input_stream_read_all (G_INPUT_STREAM(unzippedstream), 
        ////    g_converter_output_stream
        //outbuf_size = 10000000;
        //unzipped = (gchar*) g_malloc(outbuf_size);
        //g_converter_convert (G_CONVERTER (decompress), buf, len, unzipped, outbuf_size, G_CONVERTER_NO_FLAGS, &bytes_read, &bytes_written, &err);
        //GST_DEBUG_OBJECT (src, unzipped);
      }

      g_free (filename);
      g_free (addr_str);
      g_free (len_str);
      g_free (buf);
    } else if (g_str_has_prefix (url, "http")) {
      GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY,
          ("file url not supported yet"), (NULL));
      goto error;
    }
  }

  {
    // TODO: use Genicam node map for this
    guint32 val = 0;
    size_t datasize = 4;
    ret = GTL_GCReadPort (src->hDevPort, 0x30204, &val, &datasize);
    HANDLE_GTL_ERROR ("Failed to get width");
    width = GUINT32_FROM_BE (val);
    ret = GTL_GCReadPort (src->hDevPort, 0x30224, &val, &datasize);
    HANDLE_GTL_ERROR ("Failed to get height");
    height = GUINT32_FROM_BE (val);

    bpp = 8;
  }

  if (!gst_genicamsrc_prepare_buffers (src)) {
    GST_ELEMENT_ERROR (src, RESOURCE, TOO_LAZY, ("Failed to prepare buffers"),
        (NULL));
    goto error;
  }

  {
    ret =
        GTL_GCRegisterEvent (src->hDS, EVENT_NEW_BUFFER, &src->hNewBufferEvent);
    HANDLE_GTL_ERROR ("Failed to register New Buffer event");
  }

  ret =
      GTL_DSStartAcquisition (src->hDS, ACQ_START_FLAGS_DEFAULT,
      GENTL_INFINITE);
  HANDLE_GTL_ERROR ("Failed to start stream acquisition");

  {
    // TODO: use Genicam node map for this
    guint32 val;
    size_t datasize;

    /* set AcquisitionMode to Continuous */
    val = GUINT32_TO_BE (2);
    datasize = sizeof (val);
    ret = GTL_GCWritePort (src->hDevPort, 0x40004, &val, &datasize);
    HANDLE_GTL_ERROR ("Failed to start device acquisition");

    /* send AcquisitionStart command */
    val = GUINT32_TO_BE (1);
    datasize = sizeof (val);
    ret = GTL_GCWritePort (src->hDevPort, 0x40024, &val, &datasize);
    HANDLE_GTL_ERROR ("Failed to start device acquisition");
  }

  /* create caps */
  if (src->caps) {
    gst_caps_unref (src->caps);
    src->caps = NULL;
  }

  gst_video_info_init (&vinfo);

  if (bpp <= 8) {
    gst_video_info_set_format (&vinfo, GST_VIDEO_FORMAT_GRAY8, width, height);
    src->caps = gst_video_info_to_caps (&vinfo);
  } else if (bpp > 8 && bpp <= 16) {
    GValue val = G_VALUE_INIT;
    GstStructure *s;

    if (G_BYTE_ORDER == G_LITTLE_ENDIAN) {
      gst_video_info_set_format (&vinfo, GST_VIDEO_FORMAT_GRAY16_LE, width,
          height);
    } else if (G_BYTE_ORDER == G_BIG_ENDIAN) {
      gst_video_info_set_format (&vinfo, GST_VIDEO_FORMAT_GRAY16_BE, width,
          height);
    }
    src->caps = gst_video_info_to_caps (&vinfo);

    /* set bpp, extra info for GRAY16 so elements can scale properly */
    s = gst_caps_get_structure (src->caps, 0);
    g_value_init (&val, G_TYPE_INT);
    g_value_set_int (&val, bpp);
    gst_structure_set_value (s, "bpp", &val);
    g_value_unset (&val);
  } else {
    GST_ELEMENT_ERROR (src, STREAM, WRONG_TYPE,
        ("Unknown or unsupported bit depth (%d).", bpp), (NULL));
    return FALSE;
  }

  src->height = vinfo.height;
  src->gst_stride = GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0);

  GST_DEBUG_OBJECT (src, "starting acquisition");
//TODO: start acquisition engine

  /* TODO: check timestamps on buffers vs start time */
  src->acq_start_time =
      gst_clock_get_time (gst_element_get_clock (GST_ELEMENT (src)));

  return TRUE;

error:
  if (src->hDS) {
    GTL_DSClose (src->hDS);
    src->hDS = NULL;
  }

  if (src->hDEV) {
    GTL_DevClose (src->hDEV);
    src->hDEV = NULL;
  }

  if (src->hIF) {
    GTL_IFClose (src->hIF);
    src->hIF = NULL;
  }

  if (src->hTL) {
    GTL_TLClose (src->hTL);
    src->hTL = NULL;
  }

  GTL_GCCloseLib ();

  return FALSE;
}
Example #22
static void
check_conversion (TestFrame * frames, guint size)
{
  gint i, j, k, l;
  gint ref_count = 0;

  for (i = 0; i < size; i++) {
    GstBuffer *inbuf;
    GstVideoInfo in_info;
    gint in_width = frames[i].width;
    gint in_height = frames[i].height;
    GstVideoFormat in_v_format = frames[i].v_format;
    gchar *in_data[GST_VIDEO_MAX_PLANES] = { 0 };
    GstGLMemory *in_mem[GST_VIDEO_MAX_PLANES] = { 0 };
    GstVideoFrame in_frame;
    GstCaps *in_caps;

    gst_video_info_set_format (&in_info, in_v_format, in_width, in_height);
    in_caps = gst_video_info_to_caps (&in_info);
    gst_caps_set_features (in_caps, 0,
        gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      in_data[j] = frames[i].data[j];
    }

    /* create GL buffer */
    ref_count += GST_VIDEO_INFO_N_PLANES (&in_info);
    inbuf = gst_buffer_new ();
    fail_unless (gst_gl_memory_setup_wrapped (context, GST_GL_TEXTURE_TARGET_2D,
            &in_info, NULL, (gpointer *) in_data, in_mem, &ref_count,
            _frame_unref));

    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      gst_buffer_append_memory (inbuf, (GstMemory *) in_mem[j]);
    }

    fail_unless (gst_video_frame_map (&in_frame, &in_info, inbuf,
            GST_MAP_READ));

    /* sanity check that the correct values were wrapped */
    for (j = 0; j < GST_VIDEO_INFO_N_PLANES (&in_info); j++) {
      for (k = 0; k < _video_info_plane_size (&in_info, j); k++) {
        if (in_data[j][k] != IGNORE_MAGIC)
          fail_unless (((gchar *) in_frame.data[j])[k] == in_data[j][k]);
      }
    }

    for (j = 0; j < size; j++) {
      GstBuffer *outbuf;
      GstVideoInfo out_info;
      gint out_width = frames[j].width;
      gint out_height = frames[j].height;
      GstVideoFormat out_v_format = frames[j].v_format;
      gchar *out_data[GST_VIDEO_MAX_PLANES] = { 0 };
      GstVideoFrame out_frame;
      GstCaps *out_caps;

      gst_video_info_set_format (&out_info, out_v_format, out_width,
          out_height);
      out_caps = gst_video_info_to_caps (&out_info);
      gst_caps_set_features (out_caps, 0,
          gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

      for (k = 0; k < GST_VIDEO_INFO_N_PLANES (&out_info); k++) {
        out_data[k] = frames[j].data[k];
      }

      gst_gl_color_convert_set_caps (convert, in_caps, out_caps);

      /* convert the data */
      outbuf = gst_gl_color_convert_perform (convert, inbuf);
      if (outbuf == NULL) {
        const gchar *in_str = gst_video_format_to_string (in_v_format);
        const gchar *out_str = gst_video_format_to_string (out_v_format);
        GST_WARNING ("failed to convert from %s to %s", in_str, out_str);
      }

      fail_unless (gst_video_frame_map (&out_frame, &out_info, outbuf,
              GST_MAP_READ));

      /* check that the converted values are correct */
      for (k = 0; k < GST_VIDEO_INFO_N_PLANES (&out_info); k++) {
        for (l = 0; l < _video_info_plane_size (&out_info, k); l++) {
          gchar out_pixel = ((gchar *) out_frame.data[k])[l];
          if (out_data[k][l] != IGNORE_MAGIC && out_pixel != IGNORE_MAGIC)
            fail_unless (out_pixel == out_data[k][l]);
          /* FIXME: check alpha clobbering */
        }
      }

      gst_caps_unref (out_caps);
      gst_video_frame_unmap (&out_frame);
      gst_buffer_unref (outbuf);
    }

    gst_caps_unref (in_caps);
    gst_video_frame_unmap (&in_frame);
    gst_buffer_unref (inbuf);

    fail_unless_equals_int (ref_count, 0);
  }
}
Example #23
static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    g_mutex_lock(&priv->bufferMutex);

    if (priv->unlocked) {
        g_mutex_unlock(&priv->bufferMutex);
        return GST_FLOW_OK;
    }

    priv->buffer = gst_buffer_ref(buffer);

    GstCaps* caps;
    // The video info structure is valid only if the sink handled an allocation query.
    if (GST_VIDEO_INFO_FORMAT(&priv->info) != GST_VIDEO_FORMAT_UNKNOWN)
        caps = gst_video_info_to_caps(&priv->info);
    else
        caps = gst_caps_ref(priv->currentCaps); // take a ref to balance the unconditional unref below

    GstVideoFormat format;
    IntSize size;
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    if (!getVideoSizeAndFormatFromCaps(caps, &size, &format, &pixelAspectRatioNumerator, &pixelAspectRatioDenominator, &stride)) {
        gst_caps_unref(caps);
        gst_buffer_unref(buffer);
        g_mutex_unlock(&priv->bufferMutex);
        return GST_FLOW_ERROR;
    }

    gst_caps_unref(caps);

    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference for
        // the scope of this method, we can't use gst_buffer_make_writable()
        // here. The buffer content must also not be modified in place,
        // because the same buffer could (in theory) be passed to this
        // method multiple times.

        GstBuffer* newBuffer = createGstBuffer(buffer);

        // Check if allocation failed.
        if (G_UNLIKELY(!newBuffer)) {
            g_mutex_unlock(&priv->bufferMutex);
            return GST_FLOW_ERROR;
        }

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive: for
        // 720p/PAL, for example, that would mean 1280*720*25 = 23040000
        // function calls per second!
        GstMapInfo sourceInfo;
        GstMapInfo destinationInfo;
        gst_buffer_map(buffer, &sourceInfo, GST_MAP_READ);
        const guint8* source = sourceInfo.data;
        gst_buffer_map(newBuffer, &destinationInfo, GST_MAP_WRITE);
        guint8* destination = destinationInfo.data;

        for (int x = 0; x < size.height(); x++) {
            for (int y = 0; y < size.width(); y++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

        gst_buffer_unmap(buffer, &sourceInfo);
        gst_buffer_unmap(newBuffer, &destinationInfo);
        gst_buffer_unref(buffer);
        buffer = priv->buffer = newBuffer;
    }

    // This should likely use a lower priority, but glib currently starves
    // lower priority sources.
    // See: https://bugzilla.gnome.org/show_bug.cgi?id=610830.
    priv->timeoutId = g_timeout_add_full(G_PRIORITY_DEFAULT, 0, webkitVideoSinkTimeoutCallback,
                                         gst_object_ref(sink), (GDestroyNotify) gst_object_unref);
    g_source_set_name_by_id(priv->timeoutId, "[WebKit] webkitVideoSinkTimeoutCallback");

    if (!priv->silent)
        print_buffer_metadata(sink, buffer);

    g_cond_wait(&priv->dataCondition, &priv->bufferMutex);
    g_mutex_unlock(&priv->bufferMutex);
    return GST_FLOW_OK;
}
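
The inner loop above divides by 255 with rounding once per color component; a hedged sketch of an equivalent shift-based form (premultiplyComponent is a hypothetical helper, not part of the sink):

// Hypothetical helper: computes exactly (c * a + 128) / 255 for 8-bit
// inputs via the classic add-high-byte trick, avoiding the division.
static inline guint8 premultiplyComponent(guint8 c, guint8 a)
{
    guint t = (guint)c * a + 128;
    return (guint8)((t + (t >> 8)) >> 8);
}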
Ejemplo n.º 24
0
static GstCaps *
gst_phoenixsrc_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
  GstPhoenixSrc *src = GST_PHOENIX_SRC (bsrc);
  etStat eStat = PHX_OK;        /* Status variable */
  etParamValue eParamValue = PHX_INVALID_PARAMVALUE;
  ui32 dwParamValue = 0;
  guint32 phx_format;
  gint width, height;
  gint bpp, depth, endianness;
  GstVideoFormat videoFormat;
  gboolean is_gray16 = FALSE, is_bayer = FALSE;
  GstVideoInfo vinfo;
  GstCaps *caps;

  if (!src->hCamera) {
    return gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (src));
  }

  /* Create video info */
  gst_video_info_init (&vinfo);

  /* Get format (mono, Bayer, RBG, etc.) */
  eStat = PHX_ParameterGet (src->hCamera, PHX_DST_FORMAT, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  phx_format = dwParamValue;

  /* Get endianness */
  eStat = PHX_ParameterGet (src->hCamera, PHX_DST_ENDIAN, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  endianness =
      (dwParamValue == PHX_DST_LITTLE_ENDIAN) ? G_LITTLE_ENDIAN : G_BIG_ENDIAN;

  /* get width */
  eStat =
      PHX_ParameterGet (src->hCamera, PHX_ROI_XLENGTH_SCALED, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  width = dwParamValue;

  /* get height */
  eStat =
      PHX_ParameterGet (src->hCamera, PHX_ROI_YLENGTH_SCALED, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  height = dwParamValue;

  switch (phx_format) {
    case PHX_DST_FORMAT_Y8:
      videoFormat = GST_VIDEO_FORMAT_GRAY8;
      break;
    case PHX_DST_FORMAT_Y10:
      bpp = 10;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y12:
      bpp = 12;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y14:
      bpp = 14;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_Y16:
      bpp = 16;
      is_gray16 = TRUE;
      break;
    case PHX_DST_FORMAT_BAY8:
      bpp = 8;
      depth = 8;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY10:
      bpp = 10;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY12:
      bpp = 12;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY14:
      bpp = 14;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_BAY16:
      bpp = 16;
      depth = 16;
      is_bayer = TRUE;
      break;
    case PHX_DST_FORMAT_RGB15:
      videoFormat = GST_VIDEO_FORMAT_RGB15;
      break;
    case PHX_DST_FORMAT_RGB16:
      videoFormat = GST_VIDEO_FORMAT_RGB16;
      break;
    case PHX_DST_FORMAT_RGB24:
      videoFormat = GST_VIDEO_FORMAT_RGB;
      break;
    case PHX_DST_FORMAT_RGB32: /* FIXME: what is the format of this? */
    case PHX_DST_FORMAT_XRGB32:
      videoFormat = GST_VIDEO_FORMAT_xRGB;
      break;
    default:
      videoFormat = GST_VIDEO_FORMAT_UNKNOWN;
  }

  if (is_gray16)
    videoFormat = (endianness == G_LITTLE_ENDIAN) ?
        GST_VIDEO_FORMAT_GRAY16_LE : GST_VIDEO_FORMAT_GRAY16_BE;

  if (is_bayer) {
    const gchar *bay_fmt;
    eStat = PHX_ParameterGet (src->hCamera, PHX_CAM_SRC_COL, &dwParamValue);
    if (PHX_OK != eStat)
      goto ResourceSettingsError;
    switch (dwParamValue) {
      case PHX_CAM_SRC_BAY_RGGB:
        bay_fmt = (depth == 16) ? "rggb16" : "rggb";
        break;
      case PHX_CAM_SRC_BAY_GRBG:
        bay_fmt = (depth == 16) ? "grbg16" : "grbg";
        break;
      case PHX_CAM_SRC_BAY_GBRG:
        bay_fmt = (depth == 16) ? "gbrg16" : "gbrg";
        break;
      case PHX_CAM_SRC_BAY_BGGR:
        bay_fmt = (depth == 16) ? "bggr16" : "bggr";
        break;
      default:
        GST_ERROR_OBJECT (src, "Unknown PHX_CAM_SRC_COL=%d", dwParamValue);
        goto Error;
    }

    if (depth == 8) {
      caps = gst_caps_new_simple ("video/x-bayer",
          "format", G_TYPE_STRING, bay_fmt,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    } else if (depth == 16) {
      caps = gst_caps_new_simple ("video/x-bayer",
          "format", G_TYPE_STRING, bay_fmt,
          "bpp", G_TYPE_INT, bpp,
          "endianness", G_TYPE_INT, endianness,
          "width", G_TYPE_INT, width,
          "height", G_TYPE_INT, height,
          "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
    }
  } else if (videoFormat != GST_VIDEO_FORMAT_UNKNOWN) {
    vinfo.finfo = gst_video_format_get_info (videoFormat);
    vinfo.width = width;
    vinfo.height = height;

    caps = gst_video_info_to_caps (&vinfo);

    if (is_gray16) {
      GValue val = G_VALUE_INIT;
      GstStructure *s = gst_caps_get_structure (caps, 0);
      g_value_init (&val, G_TYPE_INT);
      g_value_set_int (&val, bpp);
      gst_structure_set_value (s, "bpp", &val);
      g_value_unset (&val);
    }
  } else {
    GST_ELEMENT_ERROR (src, STREAM, WRONG_TYPE,
        (("Unknown or unsupported color format.")), (NULL));
    goto Error;
  }

  /* get buffer size; width (in bytes) and height (in lines) */
  eStat = PHX_ParameterGet (src->hCamera, PHX_BUF_DST_XLENGTH, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  src->phx_stride = dwParamValue;
  eStat = PHX_ParameterGet (src->hCamera, PHX_BUF_DST_YLENGTH, &dwParamValue);
  if (PHX_OK != eStat)
    goto ResourceSettingsError;
  /* TODO: should we be using PHX_BUF_DST_YLENGTH or PHX_ROI_YLENGTH_SCALED
     for height? */
  g_assert (dwParamValue == height);

  GST_DEBUG_OBJECT (src, "The caps before filtering are %" GST_PTR_FORMAT,
      caps);

  if (filter) {
    GstCaps *tmp = gst_caps_intersect (caps, filter);
    gst_caps_unref (caps);
    caps = tmp;
  }

  GST_DEBUG_OBJECT (src, "The caps after filtering are %" GST_PTR_FORMAT, caps);

  return caps;

ResourceSettingsError:
  GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS,
      (("Failed to get Phoenix parameters.")), (NULL));

Error:
  return NULL;
}
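
The filter handling at the tail of get_caps above is a recurring pattern; a hedged, reusable sketch (apply_filter_caps is a hypothetical name):

/* Sketch: intersect caps with an optional filter, preferring the
 * filter's structure order, and return ownership of the result. */
static GstCaps *
apply_filter_caps (GstCaps * caps, GstCaps * filter)
{
  if (filter) {
    GstCaps *tmp =
        gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (caps);
    caps = tmp;
  }
  return caps;
}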
Ejemplo n.º 25
0
gboolean gst_imx_blitter_set_input_frame(GstImxBlitter *blitter, GstBuffer *frame)
{
	gboolean ret;
	GstImxPhysMemMeta *phys_mem_meta;
	GstImxBlitterClass *klass;

	g_assert(blitter != NULL);
	klass = GST_IMX_BLITTER_CLASS(G_OBJECT_GET_CLASS(blitter));
	g_assert(klass->set_input_frame != NULL);

	if (frame == NULL)
		return klass->set_input_frame(blitter, NULL);

	phys_mem_meta = GST_IMX_PHYS_MEM_META_GET(frame);

	if ((phys_mem_meta == NULL) || (phys_mem_meta->phys_addr == 0))
	{
		GstFlowReturn flow_ret;
		GstBuffer *internal_input_frame;

		/* No DMA memory present; the input frame needs to be copied to an internal input frame */

		GST_TRACE_OBJECT(blitter, "input frame does not use DMA memory - copying input frame to internal frame");

		{
			if (blitter->dma_bufferpool == NULL)
			{
				GST_TRACE_OBJECT(blitter, "need to create internal bufferpool");

				/* DMA bufferpool does not exist yet - create it now,
				 * so that it can in turn create the internal input frame */

				GstCaps *caps = gst_video_info_to_caps(&(blitter->input_video_info));

				blitter->dma_bufferpool = gst_imx_blitter_create_bufferpool(
					blitter,
					caps,
					blitter->input_video_info.size,
					0, 0,
					NULL,
					NULL
				);

				gst_caps_unref(caps);

				if (blitter->dma_bufferpool == NULL)
				{
					GST_ERROR_OBJECT(blitter, "failed to create internal bufferpool");
					return FALSE;
				}
			}

			/* Future versions of this code may propose the internal bufferpool upstream;
			 * hence the is_active check */
			if (!gst_buffer_pool_is_active(blitter->dma_bufferpool))
				gst_buffer_pool_set_active(blitter->dma_bufferpool, TRUE);
		}

		/* Create new internal input frame */
		GST_TRACE_OBJECT(blitter, "acquiring buffer for internal input frame");
		internal_input_frame = NULL;
		flow_ret = gst_buffer_pool_acquire_buffer(blitter->dma_bufferpool, &internal_input_frame, NULL);
		if (flow_ret != GST_FLOW_OK)
		{
			if (internal_input_frame != NULL)
				gst_buffer_unref(internal_input_frame);

			GST_ERROR_OBJECT(blitter, "error acquiring input frame buffer: %s", gst_flow_get_name(flow_ret));
			return FALSE;
		}

		/* Copy the input buffer's pixels to the internal input frame */
		{
			GstVideoFrame input_vidframe, internal_input_vidframe;

			gst_video_frame_map(&input_vidframe, &(blitter->input_video_info), frame, GST_MAP_READ);
			gst_video_frame_map(&internal_input_vidframe, &(blitter->input_video_info), internal_input_frame, GST_MAP_WRITE);

			/* gst_video_frame_copy() makes sure stride and plane offset values from both frames are respected */
			gst_video_frame_copy(&internal_input_vidframe, &input_vidframe);

			/* copy interlace flags */
			GST_BUFFER_FLAGS(internal_input_frame) |= (GST_BUFFER_FLAGS(frame) & (GST_VIDEO_BUFFER_FLAG_INTERLACED | GST_VIDEO_BUFFER_FLAG_TFF | GST_VIDEO_BUFFER_FLAG_RFF | GST_VIDEO_BUFFER_FLAG_ONEFIELD));

			gst_video_frame_unmap(&internal_input_vidframe);
			gst_video_frame_unmap(&input_vidframe);
		}

		ret = klass->set_input_frame(blitter, internal_input_frame);

		gst_buffer_unref(internal_input_frame);
	}
	else
	{
		GST_TRACE_OBJECT(blitter, "input frame uses DMA memory - setting it directly as input frame");
		ret = klass->set_input_frame(blitter, frame);
	}

	return ret;
}
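
A hedged usage sketch for the function above; the caller and its error handling are hypothetical:

/* Hypothetical caller: hand one frame to the blitter. Frames without
 * physical (DMA) memory are copied to an internal DMA-backed buffer
 * by gst_imx_blitter_set_input_frame() itself. */
static GstFlowReturn
push_frame_to_blitter(GstImxBlitter *blitter, GstBuffer *frame)
{
	if (!gst_imx_blitter_set_input_frame(blitter, frame))
	{
		GST_ERROR("could not set input frame");
		return GST_FLOW_ERROR;
	}

	return GST_FLOW_OK;
}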
Ejemplo n.º 26
0
/* Return the possible output caps based on inputs and downstream prefs */
static GstCaps *
_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
{
  GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
  GList *l;
  gint best_width = -1, best_height = -1;
  gdouble best_fps = -1, cur_fps;
  gint best_fps_n = 0, best_fps_d = 1;
  GstVideoInfo *mix_info;
  GstCaps *blend_caps, *tmp_caps;
  GstCaps *out_caps;

  GST_OBJECT_LOCK (vagg);

  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstVideoInfo tmp = pad->info;
    gint this_width, this_height;
    gint fps_n, fps_d;

    if (!pad->info.finfo)
      continue;

    /* This can happen if we release a pad and another pad hasn't been negotiated yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
      continue;

    /* Convert to per-view width/height for unpacked forms */
    gst_video_multiview_video_info_change_mode (&tmp,
        GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE);

    this_width = GST_VIDEO_INFO_WIDTH (&tmp);
    this_height = GST_VIDEO_INFO_HEIGHT (&tmp);
    fps_n = GST_VIDEO_INFO_FPS_N (&tmp);
    fps_d = GST_VIDEO_INFO_FPS_D (&tmp);

    GST_INFO_OBJECT (vagg, "Input pad %" GST_PTR_FORMAT
        " w %u h %u", pad, this_width, this_height);

    if (this_width == 0 || this_height == 0)
      continue;

    if (best_width < this_width)
      best_width = this_width;
    if (best_height < this_height)
      best_height = this_height;

    if (fps_d == 0)
      cur_fps = 0.0;
    else
      gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    if (best_fps < cur_fps) {
      best_fps = cur_fps;
      best_fps_n = fps_n;
      best_fps_d = fps_d;
    }

    /* FIXME: Preserve PAR for at least one input when different sized inputs */
  }
  GST_OBJECT_UNLOCK (vagg);

  mix_info = &mix->mix_info;
  gst_video_info_set_format (mix_info, GST_VIDEO_FORMAT_RGBA, best_width,
      best_height);

  GST_VIDEO_INFO_FPS_N (mix_info) = best_fps_n;
  GST_VIDEO_INFO_FPS_D (mix_info) = best_fps_d;

  GST_VIDEO_INFO_MULTIVIEW_MODE (mix_info) = GST_VIDEO_MULTIVIEW_MODE_SEPARATED;
  GST_VIDEO_INFO_VIEWS (mix_info) = 2;

  /* FIXME: If input is marked as flipped or flopped, preserve those flags */
  GST_VIDEO_INFO_MULTIVIEW_FLAGS (mix_info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE;

  /* Choose our output format based on downstream preferences */
  blend_caps = gst_video_info_to_caps (mix_info);

  gst_caps_set_features (blend_caps, 0,
      gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));

  tmp_caps = get_converted_caps (GST_GL_STEREO_MIX (vagg), blend_caps);
  gst_caps_unref (blend_caps);

  out_caps = gst_caps_intersect (caps, tmp_caps);
  gst_caps_unref (tmp_caps);

  GST_DEBUG_OBJECT (vagg, "Possible output caps %" GST_PTR_FORMAT, out_caps);

  return out_caps;
}
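
A hedged illustration of the per-view conversion used in the pad loop above, assuming the multiview helpers behave as documented: converting a side-by-side packed 1920x1080 stream to SEPARATED should halve the width per view.

GstVideoInfo tmp;
gst_video_info_set_format (&tmp, GST_VIDEO_FORMAT_RGBA, 1920, 1080);
GST_VIDEO_INFO_MULTIVIEW_MODE (&tmp) = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
gst_video_multiview_video_info_change_mode (&tmp,
    GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE);
/* GST_VIDEO_INFO_WIDTH (&tmp) should now be 960, with 2 views */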
Ejemplo n.º 27
0
static gboolean
gst_vaapidecode_update_src_caps (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
  GstVideoCodecState *state, *ref_state;
  GstVideoInfo *vi;
  GstVideoFormat format = GST_VIDEO_FORMAT_I420;

  if (!decode->input_state)
    return FALSE;

  ref_state = decode->input_state;

  GstCapsFeatures *features = NULL;
  GstVaapiCapsFeature feature;

  feature =
      gst_vaapi_find_preferred_caps_feature (GST_VIDEO_DECODER_SRC_PAD (vdec),
      GST_VIDEO_INFO_FORMAT (&ref_state->info), &format);

  if (feature == GST_VAAPI_CAPS_FEATURE_NOT_NEGOTIATED)
    return FALSE;

  switch (feature) {
#if (USE_GLX || USE_EGL)
    case GST_VAAPI_CAPS_FEATURE_GL_TEXTURE_UPLOAD_META:
      features =
          gst_caps_features_new
          (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, NULL);
      break;
#endif
#if GST_CHECK_VERSION(1,3,1)
    case GST_VAAPI_CAPS_FEATURE_VAAPI_SURFACE:
      features =
          gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_VAAPI_SURFACE, NULL);
      break;
#endif
    default:
      break;
  }

  state = gst_video_decoder_set_output_state (vdec, format,
      ref_state->info.width, ref_state->info.height, ref_state);
  if (!state || state->info.width == 0 || state->info.height == 0)
    return FALSE;

  vi = &state->info;

  state->caps = gst_video_info_to_caps (vi);
  if (features)
    gst_caps_set_features (state->caps, 0, features);
  GST_INFO_OBJECT (decode, "new src caps = %" GST_PTR_FORMAT, state->caps);
  gst_caps_replace (&decode->srcpad_caps, state->caps);
  gst_video_codec_state_unref (state);

  gint fps_n = GST_VIDEO_INFO_FPS_N (vi);
  gint fps_d = GST_VIDEO_INFO_FPS_D (vi);
  if (fps_n <= 0 || fps_d <= 0) {
    GST_DEBUG_OBJECT (decode, "forcing 25/1 framerate for latency calculation");
    fps_n = 25;
    fps_d = 1;
  }

  /* For parsing/preparation purposes we'd need at least 1 frame
   * latency in general, with perfectly known unit boundaries (NALU,
   * AU), and up to 2 frames when we need to wait for the second frame
   * start to determine the first frame is complete */
  GstClockTime latency = gst_util_uint64_scale (2 * GST_SECOND, fps_d, fps_n);
  gst_video_decoder_set_latency (vdec, latency, latency);

  return TRUE;
}
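
A worked instance of the latency computation above, with an assumed 30/1 framerate:

/* latency = gst_util_uint64_scale (2 * GST_SECOND, 1, 30)
 *         = 2000000000 / 30 = 66666666 ns (~66.7 ms, two frames) */
GstClockTime latency = gst_util_uint64_scale (2 * GST_SECOND, 1, 30);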
Ejemplo n.º 28
0
static gboolean
gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstStructure *structure;
  GstRtpVRawDepay *rtpvrawdepay;
  gint clock_rate;
  const gchar *str;
  gint format, width, height, depth, pgroup, xinc, yinc;
  GstCaps *srccaps;
  gboolean res;
  GstFlowReturn ret;

  rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);

  structure = gst_caps_get_structure (caps, 0);

  xinc = yinc = 1;

  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = 90000;         /* default */
  depayload->clock_rate = clock_rate;

  if (!(str = gst_structure_get_string (structure, "width")))
    goto no_width;
  width = atoi (str);

  if (!(str = gst_structure_get_string (structure, "height")))
    goto no_height;
  height = atoi (str);

  if (!(str = gst_structure_get_string (structure, "depth")))
    goto no_depth;
  depth = atoi (str);

  /* optional interlace value but we don't handle interlaced
   * formats yet */
  if (gst_structure_get_string (structure, "interlace"))
    goto interlaced;

  if (!(str = gst_structure_get_string (structure, "sampling")))
    goto no_sampling;

  if (!strcmp (str, "RGB")) {
    format = GST_VIDEO_FORMAT_RGB;
    pgroup = 3;
  } else if (!strcmp (str, "RGBA")) {
    format = GST_VIDEO_FORMAT_RGBA;
    pgroup = 4;
  } else if (!strcmp (str, "BGR")) {
    format = GST_VIDEO_FORMAT_BGR;
    pgroup = 3;
  } else if (!strcmp (str, "BGRA")) {
    format = GST_VIDEO_FORMAT_BGRA;
    pgroup = 4;
  } else if (!strcmp (str, "YCbCr-4:4:4")) {
    format = GST_VIDEO_FORMAT_AYUV;
    pgroup = 3;
  } else if (!strcmp (str, "YCbCr-4:2:2")) {
    if (depth == 8) {
      format = GST_VIDEO_FORMAT_UYVY;
      pgroup = 4;
    } else if (depth == 10) {
      format = GST_VIDEO_FORMAT_UYVP;
      pgroup = 5;
    } else
      goto unknown_format;
    xinc = 2;
  } else if (!strcmp (str, "YCbCr-4:2:0")) {
    format = GST_VIDEO_FORMAT_I420;
    pgroup = 6;
    xinc = yinc = 2;
  } else if (!strcmp (str, "YCbCr-4:1:1")) {
    format = GST_VIDEO_FORMAT_Y41B;
    pgroup = 6;
    xinc = 4;
  } else {
    goto unknown_format;
  }

  gst_video_info_init (&rtpvrawdepay->vinfo);
  gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
  GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
  GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;

  rtpvrawdepay->pgroup = pgroup;
  rtpvrawdepay->xinc = xinc;
  rtpvrawdepay->yinc = yinc;

  srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
  res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);

  GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
      format);
  GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
      xinc, yinc, pgroup);

  /* negotiate a bufferpool; keep our srccaps ref until after this call */
  ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, srccaps,
      &rtpvrawdepay->vinfo);
  gst_caps_unref (srccaps);
  if (ret != GST_FLOW_OK)
    goto no_bufferpool;

  return res;

  /* ERRORS */
no_width:
  {
    GST_ERROR_OBJECT (depayload, "no width specified");
    return FALSE;
  }
no_height:
  {
    GST_ERROR_OBJECT (depayload, "no height specified");
    return FALSE;
  }
no_depth:
  {
    GST_ERROR_OBJECT (depayload, "no depth specified");
    return FALSE;
  }
interlaced:
  {
    GST_ERROR_OBJECT (depayload, "interlaced formats not supported yet");
    return FALSE;
  }
no_sampling:
  {
    GST_ERROR_OBJECT (depayload, "no sampling specified");
    return FALSE;
  }
unknown_format:
  {
    GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
    return FALSE;
  }
no_bufferpool:
  {
    GST_DEBUG_OBJECT (depayload, "no bufferpool");
    return FALSE;
  }
}
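
In RFC 4175 terms, pgroup above is the byte size of the smallest integral pixel group and xinc the number of pixels it spans horizontally; a hedged sketch for packed formats (vraw_bytes_per_line is a hypothetical helper):

/* E.g. 8-bit 4:2:2 (UYVY): pgroup = 4 bytes per xinc = 2 pixels, so a
 * 1920-pixel line occupies 1920 / 2 * 4 = 3840 bytes. */
static guint
vraw_bytes_per_line (guint width, guint pgroup, guint xinc)
{
  return width / xinc * pgroup;
}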
Ejemplo n.º 29
0
static gboolean
gst_mpeg2dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
{
  GstMpeg2dec *dec = GST_MPEG2DEC (decoder);
  GstVideoCodecState *state;
  GstBufferPool *pool;
  guint size, min, max;
  GstStructure *config;
  GstAllocator *allocator;
  GstAllocationParams params;
  gboolean update_allocator;

  /* Set allocation parameters to guarantee 16-byte aligned output buffers */
  if (gst_query_get_n_allocation_params (query) > 0) {
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    update_allocator = TRUE;
  } else {
    allocator = NULL;
    gst_allocation_params_init (&params);
    update_allocator = FALSE;
  }

  params.align = MAX (params.align, 15);

  if (update_allocator)
    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
  else
    gst_query_add_allocation_param (query, allocator, &params);
  if (allocator)
    gst_object_unref (allocator);

  /* Now chain up to the parent class to guarantee that we can
   * get a buffer pool from the query */
  if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
          query))
    return FALSE;

  state = gst_video_decoder_get_output_state (decoder);

  gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);

  dec->has_cropping = FALSE;
  config = gst_buffer_pool_get_config (pool);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
    dec->has_cropping =
        gst_query_find_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE,
        NULL);
  }

  if (dec->has_cropping) {
    GstCaps *caps;

    /* Calculate uncropped size */
    size = MAX (size, dec->decoded_info.size);
    caps = gst_video_info_to_caps (&dec->decoded_info);
    gst_buffer_pool_config_set_params (config, caps, size, min, max);
    gst_caps_unref (caps);
  }

  gst_buffer_pool_set_config (pool, config);

  gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);

  gst_object_unref (pool);
  gst_video_codec_state_unref (state);

  return TRUE;
}
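
A note on the align field used above: GstAllocationParams.align is an alignment mask (alignment - 1), so the value 15 requests 16-byte aligned buffers.

GstAllocationParams params;
gst_allocation_params_init (&params);
params.align = 15;              /* addresses will satisfy (addr & 15) == 0 */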
Ejemplo n.º 30
-1
static GstBuffer *
_default_pad_upload_buffer (GstGLMixer * mix, GstGLMixerFrameData * frame,
                            GstBuffer * buffer)
{
    GstVideoAggregatorPad *vaggpad = GST_VIDEO_AGGREGATOR_PAD (frame->pad);
    GstGLMixerPad *pad = frame->pad;
    GstBuffer *uploaded_buf, *gl_buffer;
    GstCaps *gl_caps;
    GstCapsFeatures *gl_features;
    GstVideoInfo gl_info;
    GstVideoFrame gl_frame;
    GstGLSyncMeta *sync_meta;

    gst_video_info_set_format (&gl_info,
                               GST_VIDEO_FORMAT_RGBA,
                               GST_VIDEO_INFO_WIDTH (&vaggpad->info),
                               GST_VIDEO_INFO_HEIGHT (&vaggpad->info));
    gl_features =
        gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY);

    gl_caps = gst_video_info_to_caps (&gl_info);
    gst_caps_set_features (gl_caps, 0, gst_caps_features_copy (gl_features));

    if (!pad->upload) {
        GstCaps *in_caps = gst_pad_get_current_caps (GST_PAD (pad));
        GstCaps *upload_caps = gst_caps_copy (in_caps);

        pad->upload = gst_gl_upload_new (mix->context);

        gst_caps_set_features (upload_caps, 0,
                               gst_caps_features_copy (gl_features));
        gst_gl_upload_set_caps (pad->upload, in_caps, upload_caps);

        if (!pad->convert) {
            pad->convert = gst_gl_color_convert_new (mix->context);

            gst_gl_color_convert_set_caps (pad->convert, upload_caps, gl_caps);
        }

        gst_caps_unref (upload_caps);
        gst_caps_unref (in_caps);
    }

    gst_caps_features_free (gl_features);
    gst_caps_unref (gl_caps);

    sync_meta = gst_buffer_get_gl_sync_meta (vaggpad->buffer);
    if (sync_meta)
        gst_gl_sync_meta_wait (sync_meta);

    if (gst_gl_upload_perform_with_buffer (pad->upload,
                                           vaggpad->buffer, &uploaded_buf) != GST_GL_UPLOAD_DONE) {
        return NULL;
    }

    if (!(gl_buffer = gst_gl_color_convert_perform (pad->convert, uploaded_buf))) {
        gst_buffer_unref (uploaded_buf);
        return NULL;
    }

    if (!gst_video_frame_map (&gl_frame, &gl_info, gl_buffer,
                              GST_MAP_READ | GST_MAP_GL)) {
        gst_buffer_unref (uploaded_buf);
        gst_buffer_unref (gl_buffer);
        return NULL;
    }

    frame->texture = *(guint *) gl_frame.data[0];

    gst_buffer_unref (uploaded_buf);
    gst_video_frame_unmap (&gl_frame);

    return gl_buffer;
}
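
A hedged note on the map call above: mapping with GST_MAP_READ | GST_MAP_GL exposes GL handles instead of system-memory pixels, so data[0] holds a texture id rather than a pixel pointer (info and buf below are hypothetical placeholders):

GstVideoFrame vframe;
if (gst_video_frame_map (&vframe, &info, buf, GST_MAP_READ | GST_MAP_GL)) {
    guint tex_id = *(guint *) vframe.data[0];   /* GL texture name */
    gst_video_frame_unmap (&vframe);
}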