Example #1
static void
gst_mfc_dec_init (GstMFCDec * self)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) self;

  gst_video_decoder_set_packetized (decoder, TRUE);
}
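With packetized set to TRUE, as above, the base class assumes that upstream (typically a demuxer or parser) delivers exactly one complete encoded frame per input buffer, so the subclass only has to implement the ::handle_frame vfunc. The following is a minimal sketch of such a handler; GstFooDec and foo_decode() are hypothetical placeholders, not taken from any of the plugins quoted here.

#include <gst/video/gstvideodecoder.h>

/* Hypothetical decoder instance used only for this sketch. */
typedef struct _GstFooDec
{
  GstVideoDecoder parent;
} GstFooDec;

/* Hypothetical codec call: decodes one picture from @in into @out. */
static gboolean foo_decode (GstFooDec * self, GstBuffer * in, GstBuffer * out);

static GstFlowReturn
gst_foo_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstFooDec *self = (GstFooDec *) decoder;
  GstFlowReturn ret;

  /* Because the decoder is packetized, frame->input_buffer already holds
   * one whole encoded picture; no extra parsing is needed here. */
  ret = gst_video_decoder_allocate_output_frame (decoder, frame);
  if (ret != GST_FLOW_OK) {
    gst_video_decoder_release_frame (decoder, frame);
    return ret;
  }

  if (!foo_decode (self, frame->input_buffer, frame->output_buffer)) {
    gst_video_decoder_release_frame (decoder, frame);
    return GST_FLOW_ERROR;
  }

  return gst_video_decoder_finish_frame (decoder, frame);
}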
Example #2
static void
gst_rsvg_dec_init (GstRsvgDec * rsvg)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (rsvg);
  gst_video_decoder_set_packetized (decoder, FALSE);
  dec_reset (rsvg);
}
Example #3
static void
gst_vaapidecode_init (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);

  gst_vaapi_plugin_base_init (GST_VAAPI_PLUGIN_BASE (decode), GST_CAT_DEFAULT);

  decode->decoder = NULL;
  decode->decoder_caps = NULL;
  decode->allowed_caps = NULL;

  g_mutex_init (&decode->surface_ready_mutex);
  g_cond_init (&decode->surface_ready);

  gst_video_decoder_set_packetized (vdec, FALSE);

#if !GST_CHECK_VERSION(1,4,0)
  /* Pad through which data comes in to the element */
  GstPad *pad = GST_VAAPI_PLUGIN_BASE_SINK_PAD (decode);
  gst_pad_set_query_function (pad, GST_DEBUG_FUNCPTR (gst_vaapidecode_query));

  /* Pad through which data goes out of the element */
  pad = GST_VAAPI_PLUGIN_BASE_SRC_PAD (decode);
  gst_pad_set_query_function (pad, GST_DEBUG_FUNCPTR (gst_vaapidecode_query));
#endif
}
Example #4
static void
gst_mpeg2dec_init (GstMpeg2dec * mpeg2dec)
{
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (mpeg2dec), TRUE);

  /* initialize the mpeg2dec acceleration */
}
Example #5
static void
gst_daala_dec_init (GstDaalaDec * dec)
{
  /* input is packetized,
   * but is not marked that way so data gets parsed and keyframes marked */
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (dec), FALSE);
}
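When packetized is FALSE, as in the Daala example above, the base class instead accumulates input in a GstAdapter and calls the subclass's ::parse vfunc to locate frame boundaries. Below is a minimal sketch of such a parse function; foo_find_frame_end() is a hypothetical scanner that returns the size in bytes of the next complete frame in the adapter, or 0 if more data is needed.

#include <gst/video/gstvideodecoder.h>

/* Hypothetical boundary scanner used only for this sketch. */
static gsize foo_find_frame_end (GstAdapter * adapter, gsize avail);

static GstFlowReturn
gst_foo_dec_parse (GstVideoDecoder * decoder, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  gsize avail = gst_adapter_available (adapter);
  gsize frame_size = foo_find_frame_end (adapter, avail);

  if (frame_size == 0) {
    if (at_eos && avail > 0) {
      /* No more data is coming; treat the remainder as the last frame. */
      gst_video_decoder_add_to_frame (decoder, avail);
      return gst_video_decoder_have_frame (decoder);
    }
    /* Ask the base class to collect more input before calling us again. */
    return GST_VIDEO_DECODER_FLOW_NEED_DATA;
  }

  /* Move one complete encoded frame out of the adapter; the base class
   * will then invoke ::handle_frame with it. */
  gst_video_decoder_add_to_frame (decoder, frame_size);
  return gst_video_decoder_have_frame (decoder);
}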
Example #6
static gboolean
gst_pnmdec_start (GstVideoDecoder * decoder)
{
  GstPnmdec *pnmdec = (GstPnmdec *) decoder;

  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (pnmdec), FALSE);
  return TRUE;
}
Example #7
static void gst_openh264dec_init(GstOpenh264Dec *openh264dec)
{
    openh264dec->priv = GST_OPENH264DEC_GET_PRIVATE(openh264dec);
    openh264dec->priv->decoder = NULL;

    gst_video_decoder_set_packetized(GST_VIDEO_DECODER(openh264dec), TRUE);
    gst_video_decoder_set_needs_format(GST_VIDEO_DECODER(openh264dec), TRUE);
}
Example #8
static gboolean
gst_pngdec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
{
  const GstSegment *segment;

  if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
    goto done;

  gst_event_parse_segment (event, &segment);

  if (segment->format == GST_FORMAT_TIME)
    gst_video_decoder_set_packetized (bdec, TRUE);
  else
    gst_video_decoder_set_packetized (bdec, FALSE);

done:
  return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
}
Example #9
static void
gst_rsvg_dec_init (GstRsvgDec * rsvg)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (rsvg);
  gst_video_decoder_set_packetized (decoder, FALSE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (rsvg), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (rsvg));
}
Example #10
static gboolean
gst_pngdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstPngDec *pngdec = (GstPngDec *) decoder;

  if (pngdec->input_state)
    gst_video_codec_state_unref (pngdec->input_state);
  pngdec->input_state = gst_video_codec_state_ref (state);

  if (decoder->input_segment.format == GST_FORMAT_TIME)
    gst_video_decoder_set_packetized (decoder, TRUE);
  else
    gst_video_decoder_set_packetized (decoder, FALSE);

  /* We'll set format later on */

  return TRUE;
}
Example #11
static void
gst_openjpeg_dec_init (GstOpenJPEGDec * self)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) self;

  gst_video_decoder_set_packetized (decoder, TRUE);
  opj_set_default_decoder_parameters (&self->params);
  self->params.cp_limit_decoding = NO_LIMITATION;
}
Example #12
static void
gst_amc_video_dec_init (GstAmcVideoDec * self)
{
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
  gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (self), TRUE);

  g_mutex_init (&self->drain_lock);
  g_cond_init (&self->drain_cond);
}
Example #13
static gboolean
gst_pngdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstPngDec *pngdec = (GstPngDec *) decoder;
  GstVideoInfo *info = &state->info;

  if (pngdec->input_state)
    gst_video_codec_state_unref (pngdec->input_state);
  pngdec->input_state = gst_video_codec_state_ref (state);

  if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
    gst_video_decoder_set_packetized (decoder, TRUE);
  else
    gst_video_decoder_set_packetized (decoder, FALSE);

  /* We'll set format later on */

  return TRUE;
}
Example #14
static void
gst_libde265_dec_init (GstLibde265Dec * dec)
{
    dec->mode = DEFAULT_MODE;
    dec->fps_n = DEFAULT_FPS_N;
    dec->fps_d = DEFAULT_FPS_D;
    _gst_libde265_dec_reset_decoder (dec);
#if GST_CHECK_VERSION(1,0,0)
    gst_video_decoder_set_packetized(GST_VIDEO_DECODER(dec), FALSE);
#endif
}
Example #15
static void
gst_daala_dec_init (GstDaalaDec * dec)
{
  /* input is packetized,
   * but is not marked that way so data gets parsed and keyframes marked */
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (dec), FALSE);
  gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (dec), TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (dec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
}
Example #16
static gboolean
gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
{
  GstJpegDec *jpeg = GST_JPEG_DEC (dec);
  GstVideoInfo *info = &state->info;

  /* FIXME : previously jpegdec would handle input as packetized
   * if the framerate was present. Here we consider it packetized if
   * the fps is != 1/1 */
  if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
    gst_video_decoder_set_packetized (dec, TRUE);
  else
    gst_video_decoder_set_packetized (dec, FALSE);

  if (jpeg->input_state)
    gst_video_codec_state_unref (jpeg->input_state);
  jpeg->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}
Example #17
static void
gst_mpeg2dec_init (GstMpeg2dec * mpeg2dec)
{
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (mpeg2dec), TRUE);
  gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (mpeg2dec), TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (mpeg2dec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (mpeg2dec));

  /* initialize the mpeg2dec acceleration */
}
Example #18
static void
gst_theora_dec_init (GstTheoraDec * dec)
{
  dec->telemetry_mv = THEORA_DEF_TELEMETRY_MV;
  dec->telemetry_mbmode = THEORA_DEF_TELEMETRY_MBMODE;
  dec->telemetry_qi = THEORA_DEF_TELEMETRY_QI;
  dec->telemetry_bits = THEORA_DEF_TELEMETRY_BITS;

  /* input is packetized,
   * but is not marked that way so data gets parsed and keyframes marked */
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (dec), FALSE);
}
Example #19
static void
gst_libde265_dec_init (GstLibde265Dec * dec)
{
  dec->format = DEFAULT_FORMAT;
  dec->max_threads = DEFAULT_MAX_THREADS;
  dec->length_size = 4;
  _gst_libde265_dec_reset_decoder (dec);
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (dec), TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (dec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
}
Example #20
static void
gst_vaapidecode_init (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);

  gst_vaapi_plugin_base_init (GST_VAAPI_PLUGIN_BASE (decode), GST_CAT_DEFAULT);

  g_mutex_init (&decode->surface_ready_mutex);
  g_cond_init (&decode->surface_ready);

  gst_video_decoder_set_packetized (vdec, FALSE);
}
Example #21
static gboolean
gst_jpeg_dec_start (GstVideoDecoder * bdec)
{
  GstJpegDec *dec = (GstJpegDec *) bdec;

  dec->saw_header = FALSE;
  dec->parse_entropy_len = 0;
  dec->parse_resync = FALSE;

  gst_video_decoder_set_packetized (bdec, FALSE);

  return TRUE;
}
Example #22
static void
gst_openjpeg_dec_init (GstOpenJPEGDec * self)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) self;

  gst_video_decoder_set_packetized (decoder, TRUE);
  gst_video_decoder_set_needs_format (decoder, TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (self), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (self));
  opj_set_default_decoder_parameters (&self->params);
#ifdef HAVE_OPENJPEG_1
  self->params.cp_limit_decoding = NO_LIMITATION;
#endif
}
Example #23
static void
gst_vp9_dec_init (GstVP9Dec * gst_vp9_dec)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) gst_vp9_dec;

  GST_DEBUG_OBJECT (gst_vp9_dec, "gst_vp9_dec_init");
  gst_video_decoder_set_packetized (decoder, TRUE);
  gst_vp9_dec->post_processing = DEFAULT_POST_PROCESSING;
  gst_vp9_dec->post_processing_flags = DEFAULT_POST_PROCESSING_FLAGS;
  gst_vp9_dec->deblocking_level = DEFAULT_DEBLOCKING_LEVEL;
  gst_vp9_dec->noise_level = DEFAULT_NOISE_LEVEL;

  gst_video_decoder_set_needs_format (decoder, TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (decoder, TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (decoder));
}
Example #24
static void
gst_v4l2_video_dec_subinstance_init (GTypeInstance * instance, gpointer g_class)
{
  GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (instance);
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (instance);

  gst_video_decoder_set_packetized (decoder, TRUE);

  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
  self->v4l2output->no_initial_format = TRUE;
  self->v4l2output->keep_aspect = FALSE;

  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
  self->v4l2capture->no_initial_format = TRUE;
  self->v4l2output->keep_aspect = FALSE;
}
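All of the snippets above run from _init (or from start/set_format); the vfuncs that the packetized setting influences are wired up in the corresponding class_init. As a rough sketch, reusing the hypothetical GstFooDec from the earlier sketches (the ::parse entry is only required when packetized is FALSE):

typedef struct _GstFooDecClass
{
  GstVideoDecoderClass parent_class;
} GstFooDecClass;

static void
gst_foo_dec_class_init (GstFooDecClass * klass)
{
  GstVideoDecoderClass *vdec_class = GST_VIDEO_DECODER_CLASS (klass);

  /* ::handle_frame is always needed; ::parse is only consulted by the
   * base class when the subclass has called
   * gst_video_decoder_set_packetized (decoder, FALSE). */
  vdec_class->handle_frame = GST_DEBUG_FUNCPTR (gst_foo_dec_handle_frame);
  vdec_class->parse = GST_DEBUG_FUNCPTR (gst_foo_dec_parse);
}

static void
gst_foo_dec_init (GstFooDec * self)
{
  /* As in the examples above: TRUE if upstream delivers complete encoded
   * frames, FALSE if the input is an unframed byte stream. */
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), FALSE);
}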