static gboolean
gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  gboolean ret = TRUE;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  /* If the new caps are compatible with the current V4L2 output format,
   * keep the existing input state; otherwise drop it before reconfiguring. */
  if (self->input_state != NULL) {
    if (gst_v4l2_object_caps_equal (self->v4l2output, state->caps)) {
      GST_DEBUG_OBJECT (self, "Compatible caps");
      goto done;
    }

    /* FIXME we probably need to do more work if pools are active */
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  /* Push the new format to the V4L2 output queue; only keep the new
   * input state if the driver accepted it. */
  ret = gst_v4l2_object_set_format (self->v4l2output, state->caps);
  if (ret)
    self->input_state = gst_video_codec_state_ref (state);

done:
  return ret;
}
/* Beispiel #2 */
static gboolean
gst_x265_enc_set_format (GstVideoEncoder * video_enc,
    GstVideoCodecState * state)
{
  GstX265Enc *encoder = GST_X265_ENC (video_enc);
  GstVideoInfo *info = &state->info;

  /* If the encoder is initialized, do not reinitialize it again if not
   * necessary */
  if (encoder->x265enc) {
    GstVideoInfo *old = &encoder->input_state->info;

    /* Only a change in pixel format, size, framerate or PAR requires a
     * new encoder instance; otherwise just swap the cached state. */
    if (info->finfo->format == old->finfo->format
        && info->width == old->width && info->height == old->height
        && info->fps_n == old->fps_n && info->fps_d == old->fps_d
        && info->par_n == old->par_n && info->par_d == old->par_d) {
      gst_video_codec_state_unref (encoder->input_state);
      encoder->input_state = gst_video_codec_state_ref (state);
      return TRUE;
    }

    /* clear out pending frames */
    gst_x265_enc_flush_frames (encoder, TRUE);
  }

  /* Replace the cached input state. */
  if (encoder->input_state)
    gst_video_codec_state_unref (encoder->input_state);
  encoder->input_state = gst_video_codec_state_ref (state);

  /* Fix: removed the `level_ok` local — it was initialized to TRUE and
   * never modified, making `if (!level_ok) return FALSE;` dead code. */

  if (!gst_x265_enc_init_encoder (encoder))
    return FALSE;

  if (!gst_x265_enc_set_src_caps (encoder, state->caps)) {
    gst_x265_enc_close_encoder (encoder);
    return FALSE;
  }

  gst_x265_enc_set_latency (encoder);

  return TRUE;
}
/* Beispiel #3 */
static gboolean
theora_enc_set_format (GstVideoEncoder * benc, GstVideoCodecState * state)
{
    GstTheoraEnc *enc = GST_THEORA_ENC (benc);
    GstVideoInfo *info = &state->info;

    enc->width = GST_VIDEO_INFO_WIDTH (info);
    enc->height = GST_VIDEO_INFO_HEIGHT (info);

    /* Re-initialize the theora info struct for the new format. */
    th_info_clear (&enc->info);
    th_info_init (&enc->info);
    /* Theora has a divisible-by-sixteen restriction for the encoded video size but
     * we can define a picture area using pic_width/pic_height */
    enc->info.frame_width = GST_ROUND_UP_16 (enc->width);
    enc->info.frame_height = GST_ROUND_UP_16 (enc->height);
    enc->info.pic_width = enc->width;
    enc->info.pic_height = enc->height;
    /* Map the raw video format onto theora's pixel format; sink caps are
     * expected to restrict input to the formats handled below. */
    switch (GST_VIDEO_INFO_FORMAT (info)) {
    case GST_VIDEO_FORMAT_I420:
        enc->info.pixel_fmt = TH_PF_420;
        break;
    case GST_VIDEO_FORMAT_Y42B:
        enc->info.pixel_fmt = TH_PF_422;
        break;
    case GST_VIDEO_FORMAT_Y444:
        enc->info.pixel_fmt = TH_PF_444;
        break;
    default:
        g_assert_not_reached ();
    }

    /* Framerate and pixel-aspect-ratio are copied straight into the
     * theora stream info. */
    enc->info.fps_numerator = enc->fps_n = GST_VIDEO_INFO_FPS_N (info);
    enc->info.fps_denominator = enc->fps_d = GST_VIDEO_INFO_FPS_D (info);
    enc->info.aspect_numerator = GST_VIDEO_INFO_PAR_N (info);
    enc->info.aspect_denominator = GST_VIDEO_INFO_PAR_D (info);

    /* Colorspace is left unsignalled in the stream headers. */
    enc->info.colorspace = TH_CS_UNSPECIFIED;

    /* Save input state */
    if (enc->input_state)
        gst_video_codec_state_unref (enc->input_state);
    enc->input_state = gst_video_codec_state_ref (state);

    /* as done in theora */
    enc->info.keyframe_granule_shift = _ilog (enc->keyframe_force - 1);
    GST_DEBUG_OBJECT (enc,
                      "keyframe_frequency_force is %d, granule shift is %d",
                      enc->keyframe_force, enc->info.keyframe_granule_shift);

    theora_enc_reset (enc);
    enc->initialised = TRUE;

    return TRUE;
}
static gboolean
gst_vaapiencode_set_format (GstVideoEncoder * venc, GstVideoCodecState * state)
{
  GstVaapiEncode *const encode = GST_VAAPIENCODE_CAST (venc);
  gboolean ret;

  g_return_val_if_fail (state->caps != NULL, FALSE);

  /* Propagate the new codec state and caps to the encode object. */
  if (!set_codec_state (encode, state))
    return FALSE;

  if (!gst_vaapi_plugin_base_set_caps (GST_VAAPI_PLUGIN_BASE (encode),
          state->caps, NULL))
    return FALSE;

  /* Replace the cached input state and flag the change for the
   * encoding loop. */
  if (encode->input_state)
    gst_video_codec_state_unref (encode->input_state);
  encode->input_state = gst_video_codec_state_ref (state);
  encode->input_state_changed = TRUE;

  /* Start the source-pad task that pushes encoded buffers downstream. */
  ret = gst_pad_start_task (GST_VAAPI_PLUGIN_BASE_SRC_PAD (encode),
      (GstTaskFunction) gst_vaapiencode_buffer_loop, encode, NULL);

  if (!ret)
    return FALSE;

  /* Store some tags */
  {
    GstTagList *tags = gst_tag_list_new_empty ();
    const gchar *encoder, *codec;
    guint bitrate = 0;

    /* Nominal bitrate comes from the element's "bitrate" property. */
    g_object_get (encode, "bitrate", &bitrate, NULL);
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_NOMINAL_BITRATE,
        bitrate, NULL);

    /* Encoder tag is the element's long name from its class metadata. */
    if ((encoder =
            gst_element_class_get_metadata (GST_ELEMENT_GET_CLASS (encode),
                GST_ELEMENT_METADATA_LONGNAME)))
      gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_ENCODER, encoder,
          NULL);

    /* Codec tag is derived from the profile encoded in the sink caps. */
    if ((codec =
            gst_vaapi_codec_get_name (gst_vaapi_profile_get_codec
                (gst_vaapi_profile_from_caps (state->caps)))))
      gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_CODEC, codec,
          NULL);

    gst_video_encoder_merge_tags (venc, tags, GST_TAG_MERGE_REPLACE);
    gst_tag_list_unref (tags);
  }

  return TRUE;
}
/* Beispiel #5 */
static gboolean
gst_pnmdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstPnmdec *pnmdec = (GstPnmdec *) decoder;
  GstVideoCodecState *previous = pnmdec->input_state;

  /* Keep a reference to the new input state, releasing any old one. */
  pnmdec->input_state = gst_video_codec_state_ref (state);
  if (previous != NULL)
    gst_video_codec_state_unref (previous);

  return TRUE;
}
/* Beispiel #6 */
static gboolean
gst_mpeg2dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder);
  GstVideoCodecState *previous = mpeg2dec->input_state;

  /* Save input state to be used as reference for output state */
  mpeg2dec->input_state = gst_video_codec_state_ref (state);
  if (previous != NULL)
    gst_video_codec_state_unref (previous);

  return TRUE;
}
/* Beispiel #7 */
static gboolean
gst_av1_enc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
  GstVideoCodecState *output_state;
  GstAV1Enc *av1enc = GST_AV1_ENC_CAST (encoder);
  GstAV1EncClass *av1enc_class = GST_AV1_ENC_GET_CLASS (av1enc);

  /* Output caps are simply the src pad template caps. */
  output_state =
      gst_video_encoder_set_output_state (encoder,
      gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder)),
      state);
  gst_video_codec_state_unref (output_state);

  /* Replace the cached input state. */
  if (av1enc->input_state) {
    gst_video_codec_state_unref (av1enc->input_state);
  }
  av1enc->input_state = gst_video_codec_state_ref (state);

  g_mutex_lock (&av1enc->encoder_lock);
  if (aom_codec_enc_config_default (av1enc_class->codec_algo, &av1enc->aom_cfg,
          0)) {
    gst_av1_codec_error (&av1enc->encoder,
        "Failed to get default codec config.");
    /* Fix: release the encoder lock on the error path (it was previously
     * left held, deadlocking any later lock attempt). */
    g_mutex_unlock (&av1enc->encoder_lock);
    return FALSE;
  }
  GST_DEBUG_OBJECT (av1enc, "Got default encoder config");
  gst_av1_enc_debug_encoder_cfg (&av1enc->aom_cfg);

  gst_av1_enc_set_latency (av1enc);

  /* libaom timebase is the inverse of the framerate. */
  av1enc->aom_cfg.g_w = av1enc->input_state->info.width;
  av1enc->aom_cfg.g_h = av1enc->input_state->info.height;
  av1enc->aom_cfg.g_timebase.num = av1enc->input_state->info.fps_d;
  av1enc->aom_cfg.g_timebase.den = av1enc->input_state->info.fps_n;
  /* FIXME : Make configuration properties */
  av1enc->aom_cfg.rc_target_bitrate = 3000;
  av1enc->aom_cfg.g_error_resilient = AOM_ERROR_RESILIENT_DEFAULT;

  GST_DEBUG_OBJECT (av1enc, "Calling encoder init with config:");
  gst_av1_enc_debug_encoder_cfg (&av1enc->aom_cfg);

  if (aom_codec_enc_init (&av1enc->encoder, av1enc_class->codec_algo,
          &av1enc->aom_cfg, 0)) {
    gst_av1_codec_error (&av1enc->encoder, "Failed to initialize encoder");
    /* Fix: release the encoder lock on the error path. */
    g_mutex_unlock (&av1enc->encoder_lock);
    return FALSE;
  }
  av1enc->encoder_inited = TRUE;

  GST_AV1_ENC_APPLY_CODEC_CONTROL (av1enc, AOME_SET_CPUUSED, av1enc->cpu_used);
  g_mutex_unlock (&av1enc->encoder_lock);

  return TRUE;
}
static gboolean
gst_pngdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstPngDec *pngdec = (GstPngDec *) decoder;
  GstVideoCodecState *previous = pngdec->input_state;

  /* Swap in the new input state, dropping the old reference. */
  pngdec->input_state = gst_video_codec_state_ref (state);
  if (previous != NULL)
    gst_video_codec_state_unref (previous);

  /* We'll set format later on */

  return TRUE;
}
static gboolean gst_openh264dec_set_format(GstVideoDecoder *decoder, GstVideoCodecState *state)
{
    GstOpenh264Dec *openh264dec = GST_OPENH264DEC(decoder);
    GstVideoCodecState *previous;

    GST_DEBUG_OBJECT(openh264dec, "openh264_dec_set_format called, caps: %" GST_PTR_FORMAT, state->caps);

    /* Replace the cached input state with the new one. */
    previous = openh264dec->priv->input_state;
    openh264dec->priv->input_state = gst_video_codec_state_ref (state);
    if (previous)
      gst_video_codec_state_unref (previous);

    return TRUE;
}
/* Beispiel #10 */
static gboolean
gst_vtdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstStructure *structure;
  CMVideoCodecType cm_format = 0;
  CMFormatDescriptionRef format_description = NULL;
  const char *caps_name;
  GstVtdec *vtdec = GST_VTDEC (decoder);

  GST_DEBUG_OBJECT (vtdec, "set_format");

  /* Map the caps media type onto a CoreMedia codec type.
   * NOTE(review): any other caps name leaves cm_format at 0 — presumably
   * unreachable given the sink pad caps; confirm. */
  structure = gst_caps_get_structure (state->caps, 0);
  caps_name = gst_structure_get_name (structure);
  if (!strcmp (caps_name, "video/x-h264")) {
    cm_format = kCMVideoCodecType_H264;
  } else if (!strcmp (caps_name, "video/mpeg")) {
    cm_format = kCMVideoCodecType_MPEG2Video;
  } else if (!strcmp (caps_name, "image/jpeg")) {
    cm_format = kCMVideoCodecType_JPEG;
  }

  /* H.264 needs codec data before a format description can be created;
   * defer setup until it arrives. */
  if (cm_format == kCMVideoCodecType_H264 && state->codec_data == NULL) {
    GST_INFO_OBJECT (vtdec, "no codec data, wait for one");
    return TRUE;
  }

  gst_video_info_from_caps (&vtdec->video_info, state->caps);

  if (!gst_vtdec_compute_reorder_queue_length (vtdec, cm_format,
          state->codec_data))
    return FALSE;
  gst_vtdec_set_latency (vtdec);

  /* Build the CoreMedia format description, from codec data if present. */
  if (state->codec_data) {
    format_description = create_format_description_from_codec_data (vtdec,
        cm_format, state->codec_data);
  } else {
    format_description = create_format_description (vtdec, cm_format);
  }

  /* Swap in the new format description, releasing the old one. */
  if (vtdec->format_description)
    CFRelease (vtdec->format_description);
  vtdec->format_description = format_description;

  /* Replace the cached input state. */
  if (vtdec->input_state)
    gst_video_codec_state_unref (vtdec->input_state);
  vtdec->input_state = gst_video_codec_state_ref (state);

  return gst_video_decoder_negotiate (decoder);
}
/* Beispiel #11 */
static gboolean
gst_pngenc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
  GstPngEnc *pngenc = GST_PNGENC (encoder);
  GstVideoInfo *vinfo = &state->info;
  GstVideoCodecState *output_state;

  /* Map the input video format onto a libpng color type; anything else
   * is unsupported. */
  switch (GST_VIDEO_INFO_FORMAT (vinfo)) {
    case GST_VIDEO_FORMAT_RGBA:
      pngenc->png_color_type = PNG_COLOR_TYPE_RGBA;
      break;
    case GST_VIDEO_FORMAT_RGB:
      pngenc->png_color_type = PNG_COLOR_TYPE_RGB;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      pngenc->png_color_type = PNG_COLOR_TYPE_GRAY;
      break;
    default:
      return FALSE;
  }

  /* Only GRAY16_BE uses 16 bits per channel; every other accepted
   * format is 8-bit. */
  pngenc->depth =
      (GST_VIDEO_INFO_FORMAT (vinfo) == GST_VIDEO_FORMAT_GRAY16_BE) ? 16 : 8;

  /* Replace the cached input state. */
  if (pngenc->input_state)
    gst_video_codec_state_unref (pngenc->input_state);
  pngenc->input_state = gst_video_codec_state_ref (state);

  /* Announce image/png output caps based on the new input state. */
  output_state =
      gst_video_encoder_set_output_state (encoder,
      gst_caps_new_empty_simple ("image/png"), state);
  gst_video_codec_state_unref (output_state);

  return TRUE;
}
static gboolean gst_libde265_dec_set_format (VIDEO_DECODER_BASE * parse,
    GstVideoCodecState * state)
{
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);
    GstVideoCodecState *previous = dec->input_state;

    /* Cache the new input state (which may be NULL), taking a reference
     * on it before dropping the previous one. */
    if (state != NULL) {
        gst_video_codec_state_ref(state);
    }
    dec->input_state = state;
    if (previous != NULL) {
        gst_video_codec_state_unref(previous);
    }

    return TRUE;
}
static gboolean
gst_vdp_h264_dec_set_format (GstVideoDecoder * video_decoder,
    GstVideoCodecState * state)
{
  GstVdpH264Dec *h264_dec = GST_VDP_H264_DEC (video_decoder);
  GstVideoCodecState *previous = h264_dec->input_state;

  /* Take a reference on the new input state before dropping the old. */
  h264_dec->input_state = gst_video_codec_state_ref (state);
  if (previous)
    gst_video_codec_state_unref (previous);

  GST_FIXME_OBJECT (video_decoder, "Do something when receiving input state ?");

  return TRUE;
}
/* Beispiel #14 */
static gboolean
gst_rsvg_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
  GstVideoInfo *vinfo = &state->info;
  GstVideoCodecState *previous = rsvg->input_state;

  /* Swap in the new input state. */
  rsvg->input_state = gst_video_codec_state_ref (state);
  if (previous != NULL)
    gst_video_codec_state_unref (previous);

  /* Create the output state: always GST_RSVG_VIDEO_FORMAT at the input
   * dimensions. */
  gst_video_decoder_set_output_state (decoder, GST_RSVG_VIDEO_FORMAT,
      GST_VIDEO_INFO_WIDTH (vinfo), GST_VIDEO_INFO_HEIGHT (vinfo),
      rsvg->input_state);

  return TRUE;
}
static gboolean
gst_openjpeg_dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
  GstStructure *s;
  const gchar *color_space;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  s = gst_caps_get_structure (state->caps, 0);

  /* Default to unknown until a "colorspace" field tells us otherwise. */
  self->color_space = CLRSPC_UNKNOWN;

  /* Pick the OpenJPEG codec and JP2C framing from the caps name. */
  if (gst_structure_has_name (s, "image/jp2")) {
    self->codec_format = CODEC_JP2;
    self->is_jp2c = FALSE;
  } else if (gst_structure_has_name (s, "image/x-j2c")) {
    self->codec_format = CODEC_J2K;
    self->is_jp2c = TRUE;
  } else if (gst_structure_has_name (s, "image/x-jpc")) {
    self->codec_format = CODEC_J2K;
    self->is_jp2c = FALSE;
  } else {
    g_return_val_if_reached (FALSE);
  }

  /* Refine the colorspace from the caps, when present. */
  color_space = gst_structure_get_string (s, "colorspace");
  if (color_space != NULL) {
    if (g_str_equal (color_space, "sRGB"))
      self->color_space = CLRSPC_SRGB;
    else if (g_str_equal (color_space, "GRAY"))
      self->color_space = CLRSPC_GRAY;
    else if (g_str_equal (color_space, "sYUV"))
      self->color_space = CLRSPC_SYCC;
  }

  /* Component count from the caps, defaulting to 0 when absent. */
  self->ncomps = 0;
  gst_structure_get_int (s, "num-components", &self->ncomps);

  /* Replace the cached input state. */
  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}
/* Beispiel #16 */
static gboolean
gst_pngdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstPngDec *pngdec = (GstPngDec *) decoder;
  gboolean packetized;

  /* Replace the cached input state. */
  if (pngdec->input_state)
    gst_video_codec_state_unref (pngdec->input_state);
  pngdec->input_state = gst_video_codec_state_ref (state);

  /* In TIME segments each incoming buffer is a complete frame. */
  packetized = (decoder->input_segment.format == GST_FORMAT_TIME);
  gst_video_decoder_set_packetized (decoder, packetized);

  /* We'll set format later on */

  return TRUE;
}
/* Beispiel #17 */
static gboolean
gst_pngdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstPngDec *pngdec = (GstPngDec *) decoder;
  GstVideoInfo *info = &state->info;

  /* Replace the cached input state. */
  if (pngdec->input_state)
    gst_video_codec_state_unref (pngdec->input_state);
  pngdec->input_state = gst_video_codec_state_ref (state);

  /* Treat input as packetized when both fps_n and fps_d differ from 1.
   * NOTE(review): with '&&', an fps like 1/2 or 2/1 is still treated as
   * non-packetized; if the intent is literally "fps != 1/1" this would
   * be '||' — confirm against upstream behavior before changing. */
  if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
    gst_video_decoder_set_packetized (decoder, TRUE);
  else
    gst_video_decoder_set_packetized (decoder, FALSE);

  /* We'll set format later on */

  return TRUE;
}
/* Beispiel #18 */
static gboolean
gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
{
  GstJpegDec *jpeg = GST_JPEG_DEC (dec);
  GstVideoInfo *info = &state->info;

  /* FIXME : previously jpegdec would handled input as packetized
   * if the framerate was present. Here we consider it packetized if
   * the fps is != 1/1 */
  /* NOTE(review): with '&&', an fps like 1/2 or 2/1 still counts as
   * non-packetized, which does not match the comment's "fps != 1/1";
   * confirm whether '||' was intended before changing. */
  if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
    gst_video_decoder_set_packetized (dec, TRUE);
  else
    gst_video_decoder_set_packetized (dec, FALSE);

  /* Replace the cached input state. */
  if (jpeg->input_state)
    gst_video_codec_state_unref (jpeg->input_state);
  jpeg->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}
/* Beispiel #19 */
static gboolean
gst_pnmenc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
  GstPnmenc *pnmenc = GST_PNMENC (encoder);
  GstVideoInfo *vinfo = &state->info;
  GstVideoCodecState *output_state;

  /* Only RGB (pixmap) and 8-bit grayscale (graymap) inputs map onto a
   * PNM type; reject anything else. */
  switch (GST_VIDEO_INFO_FORMAT (vinfo)) {
    case GST_VIDEO_FORMAT_RGB:
      pnmenc->info.type = GST_PNM_TYPE_PIXMAP;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      pnmenc->info.type = GST_PNM_TYPE_GRAYMAP;
      break;
    default:
      return FALSE;
  }

  pnmenc->info.width = GST_VIDEO_INFO_WIDTH (vinfo);
  pnmenc->info.height = GST_VIDEO_INFO_HEIGHT (vinfo);
  /* Supported max value is only one, that is 255 */
  pnmenc->info.max = 255;

  /* Replace the cached input state. */
  if (pnmenc->input_state)
    gst_video_codec_state_unref (pnmenc->input_state);
  pnmenc->input_state = gst_video_codec_state_ref (state);

  /* Announce image/pnm output caps based on the new input state. */
  output_state =
      gst_video_encoder_set_output_state (encoder,
      gst_caps_new_empty_simple ("image/pnm"), state);
  gst_video_codec_state_unref (output_state);

  return TRUE;
}
/* Beispiel #20 */
static gboolean
gst_vp9_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstVP9Dec *gst_vp9_dec = GST_VP9_DEC (decoder);

  GST_DEBUG_OBJECT (gst_vp9_dec, "set_format");

  /* Tear down any existing libvpx decoder; a new one will be set up for
   * the new format. */
  if (gst_vp9_dec->decoder_inited) {
    vpx_codec_destroy (&gst_vp9_dec->decoder);
  }
  gst_vp9_dec->decoder_inited = FALSE;

  /* Drop the cached output state. */
  if (gst_vp9_dec->output_state != NULL) {
    gst_video_codec_state_unref (gst_vp9_dec->output_state);
    gst_vp9_dec->output_state = NULL;
  }

  /* Replace the cached input state. */
  if (gst_vp9_dec->input_state != NULL)
    gst_video_codec_state_unref (gst_vp9_dec->input_state);
  gst_vp9_dec->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}
static gboolean
gst_webp_enc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
{
  GstWebpEnc *enc = GST_WEBP_ENC (encoder);
  GstVideoCodecState *output_state;
  GstVideoInfo *vinfo = &state->info;
  GstVideoFormat fmt = GST_VIDEO_INFO_FORMAT (vinfo);

  /* I420/YV12 inputs use WebP's YUV420 color space; RGB inputs switch
   * the encoder into ARGB mode and record the exact RGB layout. */
  if (GST_VIDEO_INFO_IS_YUV (vinfo)) {
    if (fmt == GST_VIDEO_FORMAT_I420 || fmt == GST_VIDEO_FORMAT_YV12)
      enc->webp_color_space = WEBP_YUV420;
  } else if (GST_VIDEO_INFO_IS_RGB (vinfo)) {
    enc->rgb_format = fmt;
    enc->use_argb = 1;
  }

  /* Replace the cached input state. */
  if (enc->input_state)
    gst_video_codec_state_unref (enc->input_state);
  enc->input_state = gst_video_codec_state_ref (state);

  /* Announce image/webp output caps based on the new input state. */
  output_state =
      gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (enc),
      gst_caps_new_empty_simple ("image/webp"), enc->input_state);
  gst_video_codec_state_unref (output_state);

  return TRUE;
}
/* Beispiel #22 */
static gboolean gst_aml_vdec_set_format(GstVideoDecoder *dec, GstVideoCodecState *state)
{
	gboolean ret = FALSE;
	GstStructure *structure;
	const char *name;
	GstVideoInfo *info;
	GstVideoFormat fmt;
	GstAmlVdec *amlvdec = GST_AMLVDEC(dec);

	/* Fix: removed the unused locals `par_num`/`par_den` — they were
	 * read from the video info but never used afterwards. */

	g_return_val_if_fail(state != NULL, FALSE);

	/* Replace the cached input state. */
	if (amlvdec->input_state)
		gst_video_codec_state_unref(amlvdec->input_state);
	amlvdec->input_state = gst_video_codec_state_ref(state);

	structure = gst_caps_get_structure(state->caps, 0);
	name = gst_structure_get_name(structure);
	GST_INFO_OBJECT(amlvdec, "%s = %s", __FUNCTION__, name);
	if (name) {
		ret = gst_set_vstream_info(amlvdec, state->caps);
		if (!amlvdec->output_state) {
			/* First negotiation: dimensions come from the codec's
			 * system info, output format is always xRGB. */
			info = &amlvdec->input_state->info;
			fmt = GST_VIDEO_FORMAT_xRGB;
			GST_VIDEO_INFO_WIDTH (info) = amlvdec->pcodec->am_sysinfo.width;
			GST_VIDEO_INFO_HEIGHT (info) = amlvdec->pcodec->am_sysinfo.height;
			amlvdec->output_state = gst_video_decoder_set_output_state(GST_VIDEO_DECODER(amlvdec),
					fmt, info->width,
					info->height,
					amlvdec->input_state);
			gst_video_decoder_negotiate (GST_VIDEO_DECODER (amlvdec));
		}
	}
	return ret;
}
static gboolean
gst_vaapiencode_set_format (GstVideoEncoder * venc, GstVideoCodecState * state)
{
  GstVaapiEncode *const encode = GST_VAAPIENCODE_CAST (venc);
  GstVideoCodecState *previous;

  g_return_val_if_fail (state->caps != NULL, FALSE);

  /* Make sure the underlying encoder exists, then push the new codec
   * state and caps down to it. */
  if (!ensure_encoder (encode))
    return FALSE;
  if (!set_codec_state (encode, state))
    return FALSE;
  if (!gst_vaapi_plugin_base_set_caps (GST_VAAPI_PLUGIN_BASE (encode),
          state->caps, NULL))
    return FALSE;

  /* Swap in the new input state and flag the change. */
  previous = encode->input_state;
  encode->input_state = gst_video_codec_state_ref (state);
  encode->input_state_changed = TRUE;
  if (previous)
    gst_video_codec_state_unref (previous);

  /* Start the source-pad task that pushes encoded buffers downstream. */
  return gst_pad_start_task (GST_VAAPI_PLUGIN_BASE_SRC_PAD (encode),
      (GstTaskFunction) gst_vaapiencode_buffer_loop, encode, NULL);
}
/* Beispiel #24 */
static gboolean
gst_msdkdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstMsdkDec *thiz = GST_MSDKDEC (decoder);

  if (thiz->input_state) {
    GstVideoInfo *old_info = &thiz->input_state->info;
    GstVideoInfo *new_info = &state->info;

    /* mark for re-negotiation if display resolution changes */
    if (GST_VIDEO_INFO_WIDTH (old_info) != GST_VIDEO_INFO_WIDTH (new_info) ||
        GST_VIDEO_INFO_HEIGHT (old_info) != GST_VIDEO_INFO_HEIGHT (new_info))
      thiz->do_renego = TRUE;
    gst_video_codec_state_unref (thiz->input_state);
  }
  thiz->input_state = gst_video_codec_state_ref (state);

  /* we don't set output state here to avoid caching of mismatched
   * video information if there is dynamic resolution change in the stream.
   * All negotiation code is consolidated in gst_msdkdec_negotiate() and
   * this will be invoked from handle_frame() */

  gst_msdkdec_set_latency (thiz);
  return TRUE;
}
/* Beispiel #25 */
static gboolean
gst_amc_video_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstAmcVideoEnc *self;
  GstAmcFormat *format = NULL;
  GstCaps *allowed_caps = NULL;
  gboolean is_format_change = FALSE;
  gboolean needs_disable = FALSE;
  gchar *format_string;
  gboolean r = FALSE;
  GError *err = NULL;

  self = GST_AMC_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Setting new caps %" GST_PTR_FORMAT, state->caps);

  /* Check if the caps change is a real format change or if only irrelevant
   * parts of the caps have changed or nothing at all.
   */
  /* Only width/height are compared here; any other caps change (e.g.
   * framerate) is treated as minor and does not force a restart. */
  is_format_change |= self->color_format_info.width != state->info.width;
  is_format_change |= self->color_format_info.height != state->info.height;
  needs_disable = self->started;

  /* If the component is not started and a real format change happens
   * we have to restart the component. If no real format change
   * happened we can just exit here.
   */
  if (needs_disable && !is_format_change) {

    /* Framerate or something minor changed */
    if (self->input_state)
      gst_video_codec_state_unref (self->input_state);
    self->input_state = gst_video_codec_state_ref (state);
    GST_DEBUG_OBJECT (self,
        "Already running and caps did not change the format");
    return TRUE;
  }

  if (needs_disable && is_format_change) {
    /* Drain pending output, then fully stop/close and reopen the codec.
     * The stream lock is released around stop so the srcpad task can
     * finish. */
    gst_amc_video_enc_drain (self);
    GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
    gst_amc_video_enc_stop (GST_VIDEO_ENCODER (self));
    GST_VIDEO_ENCODER_STREAM_LOCK (self);
    gst_amc_video_enc_close (GST_VIDEO_ENCODER (self));
    if (!gst_amc_video_enc_open (GST_VIDEO_ENCODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to open codec again");
      return FALSE;
    }

    /* NOTE(review): a start failure is only logged and execution
     * continues — confirm whether this should fail instead. */
    if (!gst_amc_video_enc_start (GST_VIDEO_ENCODER (self))) {
      GST_ERROR_OBJECT (self, "Failed to start codec again");
    }
  }
  /* srcpad task is not running at this point */
  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = NULL;

  /* Pick the preferred output format from the peer, falling back to the
   * src pad template caps when there is no peer yet. */
  GST_DEBUG_OBJECT (self, "picking an output format ...");
  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  if (!allowed_caps) {
    GST_DEBUG_OBJECT (self, "... but no peer, using template caps");
    allowed_caps =
        gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  }
  GST_DEBUG_OBJECT (self, "chose caps %" GST_PTR_FORMAT, allowed_caps);
  /* Keep only the first (preferred) caps structure. */
  allowed_caps = gst_caps_truncate (allowed_caps);

  format = create_amc_format (self, state, allowed_caps);
  if (!format)
    goto quit;

  format_string = gst_amc_format_to_string (format, &err);
  if (err)
    GST_ELEMENT_WARNING_FROM_ERROR (self, err);
  GST_DEBUG_OBJECT (self, "Configuring codec with format: %s",
      GST_STR_NULL (format_string));
  g_free (format_string);

  if (!gst_amc_codec_configure (self->codec, format, 1, &err)) {
    GST_ERROR_OBJECT (self, "Failed to configure codec");
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    goto quit;
  }

  if (!gst_amc_codec_start (self->codec, &err)) {
    GST_ERROR_OBJECT (self, "Failed to start codec");
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    goto quit;
  }

  /* Ownership of the format moves to the element; clear the local so
   * the cleanup below does not free it. */
  self->amc_format = format;
  format = NULL;

  self->input_state = gst_video_codec_state_ref (state);

  self->started = TRUE;

  /* Start the srcpad loop again */
  self->flushing = FALSE;
  self->downstream_flow_ret = GST_FLOW_OK;
  gst_pad_start_task (GST_VIDEO_ENCODER_SRC_PAD (self),
      (GstTaskFunction) gst_amc_video_enc_loop, encoder, NULL);

  r = TRUE;

quit:
  /* Common cleanup for both the success and failure paths. */
  if (allowed_caps)
    gst_object_unref (allowed_caps);

  if (format)
    gst_amc_format_free (format);

  return r;
}
static gboolean
gst_openjpeg_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstCaps *allowed_caps, *caps;
  GstStructure *s;
  const gchar *colorspace;
  gint ncomps;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  /* Replace the cached input state. */
  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  /* Fix: gst_pad_get_allowed_caps() returns NULL when there is no peer;
   * fall back to the src pad template caps instead of passing NULL to
   * gst_caps_truncate() (same strategy as other encoders in this file). */
  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  if (!allowed_caps)
    allowed_caps =
        gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
  allowed_caps = gst_caps_truncate (allowed_caps);
  s = gst_caps_get_structure (allowed_caps, 0);
  /* Choose the OpenJPEG codec and JP2C framing from the downstream-
   * preferred caps name. */
  if (gst_structure_has_name (s, "image/jp2")) {
    self->codec_format = OPJ_CODEC_JP2;
    self->is_jp2c = FALSE;
  } else if (gst_structure_has_name (s, "image/x-j2c")) {
    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = TRUE;
  } else if (gst_structure_has_name (s, "image/x-jpc")) {
    self->codec_format = OPJ_CODEC_J2K;
    self->is_jp2c = FALSE;
  } else {
    /* Fix: release the caps reference before bailing out. */
    gst_caps_unref (allowed_caps);
    g_return_val_if_reached (FALSE);
  }

  /* Select the image-fill routine and component count for the
   * negotiated raw input format. */
  switch (state->info.finfo->format) {
    case GST_VIDEO_FORMAT_ARGB64:
      self->fill_image = fill_image_packed16_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_ARGB:
      self->fill_image = fill_image_packed8_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_xRGB:
      self->fill_image = fill_image_packed8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_AYUV64:
      self->fill_image = fill_image_packed16_4;
      ncomps = 4;
      break;
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
      self->fill_image = fill_image_planar16_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_AYUV:
      self->fill_image = fill_image_packed8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_YUV9:
      self->fill_image = fill_image_planar8_3;
      ncomps = 3;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      self->fill_image = fill_image_planar8_1;
      ncomps = 1;
      break;
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
      self->fill_image = fill_image_planar16_1;
      ncomps = 1;
      break;
    default:
      g_assert_not_reached ();
  }

  /* Signal the colorspace on the output caps based on the input
   * format's flags. */
  if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_YUV))
    colorspace = "sYUV";
  else if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_RGB))
    colorspace = "sRGB";
  else if ((state->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_GRAY))
    colorspace = "GRAY";
  else
    g_return_val_if_reached (FALSE);

  caps = gst_caps_new_simple (gst_structure_get_name (s),
      "colorspace", G_TYPE_STRING, colorspace,
      "num-components", G_TYPE_INT, ncomps, NULL);
  gst_caps_unref (allowed_caps);

  /* Replace the cached output state and renegotiate. */
  if (self->output_state)
    gst_video_codec_state_unref (self->output_state);
  self->output_state =
      gst_video_encoder_set_output_state (encoder, caps, state);

  gst_video_encoder_negotiate (GST_VIDEO_ENCODER (encoder));

  return TRUE;
}
/* Beispiel #27 */
static gboolean
gst_libde265_dec_set_format (GstVideoDecoder * decoder,
    GstVideoCodecState * state)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);

  /* Cache the new input state (which may be NULL). */
  if (dec->input_state != NULL) {
    gst_video_codec_state_unref (dec->input_state);
  }
  dec->input_state = state;
  if (state != NULL) {
    gst_video_codec_state_ref (state);
  }
  if (state != NULL && state->caps != NULL) {
    GstStructure *str;
    const GValue *value;
    str = gst_caps_get_structure (state->caps, 0);
    if ((value = gst_structure_get_value (str, "codec_data"))) {
      GstMapInfo info;
      guint8 *data;
      gsize size;
      GstBuffer *buf;
      de265_error err;
      int more;

      buf = gst_value_get_buffer (value);
      if (!gst_buffer_map (buf, &info, GST_MAP_READ)) {
        GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
            ("Failed to map codec data"), (NULL));
        return FALSE;
      }
      data = info.data;
      size = info.size;
      /* Keep a private copy of the codec data. */
      free (dec->codec_data);
      dec->codec_data = malloc (size);
      g_assert (dec->codec_data != NULL);
      dec->codec_data_size = size;
      memcpy (dec->codec_data, data, size);
      if (size > 3 && (data[0] || data[1] || data[2] > 1)) {
        /* encoded in "hvcC" format (assume version 0) */
        dec->format = GST_TYPE_LIBDE265_FORMAT_PACKETIZED;
        if (size > 22) {
          int i;
          int num_param_sets;
          int pos;
          if (data[0] != 0) {
            GST_ELEMENT_WARNING (decoder, STREAM,
                DECODE, ("Unsupported extra data version %d, decoding may fail",
                    data[0]), (NULL));
          }
          dec->length_size = (data[21] & 3) + 1;
          num_param_sets = data[22];
          pos = 23;
          /* Walk the hvcC parameter-set arrays and feed each NAL to the
           * decoder up front. */
          for (i = 0; i < num_param_sets; i++) {
            int j;
            int nal_count;
            if (pos + 3 > size) {
              GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                  ("Buffer underrun in extra header (%d >= %" G_GSIZE_FORMAT
                      ")", pos + 3, size), (NULL));
              /* Fix: unmap the buffer before returning on this error
               * path (previously leaked the mapping). */
              gst_buffer_unmap (buf, &info);
              return FALSE;
            }
            /* ignore flags + NAL type (1 byte) */
            nal_count = data[pos + 1] << 8 | data[pos + 2];
            pos += 3;
            for (j = 0; j < nal_count; j++) {
              int nal_size;
              if (pos + 2 > size) {
                GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                    ("Buffer underrun in extra nal header (%d >= %"
                        G_GSIZE_FORMAT ")", pos + 2, size), (NULL));
                /* Fix: unmap the buffer on this error path. */
                gst_buffer_unmap (buf, &info);
                return FALSE;
              }
              nal_size = data[pos] << 8 | data[pos + 1];
              if (pos + 2 + nal_size > size) {
                GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                    ("Buffer underrun in extra nal (%d >= %" G_GSIZE_FORMAT ")",
                        pos + 2 + nal_size, size), (NULL));
                /* Fix: unmap the buffer on this error path. */
                gst_buffer_unmap (buf, &info);
                return FALSE;
              }
              err =
                  de265_push_NAL (dec->ctx, data + pos + 2, nal_size, 0, NULL);
              if (!de265_isOK (err)) {
                GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                    ("Failed to push data: %s (%d)", de265_get_error_text (err),
                        err), (NULL));
                /* Fix: unmap the buffer on this error path. */
                gst_buffer_unmap (buf, &info);
                return FALSE;
              }
              pos += 2 + nal_size;
            }
          }
        }
        GST_DEBUG ("Assuming packetized data (%d bytes length)",
            dec->length_size);
      } else {
        dec->format = GST_TYPE_LIBDE265_FORMAT_BYTESTREAM;
        GST_DEBUG_OBJECT (dec, "Assuming non-packetized data");
        err = de265_push_data (dec->ctx, data, size, 0, NULL);
        if (!de265_isOK (err)) {
          gst_buffer_unmap (buf, &info);
          GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
              ("Failed to push codec data: %s (code=%d)",
                  de265_get_error_text (err), err), (NULL));
          return FALSE;
        }
      }
      gst_buffer_unmap (buf, &info);
      de265_push_end_of_NAL (dec->ctx);
      /* Pre-decode the pushed parameter sets until the decoder asks for
       * more input. */
      do {
        err = de265_decode (dec->ctx, &more);
        switch (err) {
          case DE265_OK:
            break;

          case DE265_ERROR_IMAGE_BUFFER_FULL:
          case DE265_ERROR_WAITING_FOR_INPUT_DATA:
            /* not really an error */
            more = 0;
            break;

          default:
            if (!de265_isOK (err)) {
              GST_ELEMENT_ERROR (decoder, STREAM, DECODE,
                  ("Failed to decode codec data: %s (code=%d)",
                      de265_get_error_text (err), err), (NULL));
              return FALSE;
            }
        }
      } while (more);
    } else if ((value = gst_structure_get_value (str, "stream-format"))) {
      const gchar *str = g_value_get_string (value);
      if (strcmp (str, "byte-stream") == 0) {
        dec->format = GST_TYPE_LIBDE265_FORMAT_BYTESTREAM;
        GST_DEBUG_OBJECT (dec, "Assuming raw byte-stream");
      }
    }
  }

  return TRUE;
}
static gboolean
gst_openh264enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstOpenh264Enc *openh264enc = GST_OPENH264ENC (encoder);
  gchar *debug_caps;
  guint width, height, fps_n, fps_d;
  SEncParamExt enc_params;
  SliceModeEnum slice_mode = SM_SINGLE_SLICE;
  guint n_slices = 1;
  gint ret;
  GstCaps *outcaps;
  GstVideoCodecState *output_state;
  int video_format = videoFormatI420;
  unsigned int trace_level = WELS_LOG_ERROR;

  /* Reset the per-stream frame counter for the new format. */
  openh264enc->frame_count = 0;

  debug_caps = gst_caps_to_string (state->caps);
  GST_DEBUG_OBJECT (openh264enc, "gst_e26d4_enc_set_format called, caps: %s",
      debug_caps);
  g_free (debug_caps);

  gst_openh264enc_stop (encoder);

  if (openh264enc->input_state) {
    gst_video_codec_state_unref (openh264enc->input_state);
  }
  openh264enc->input_state = gst_video_codec_state_ref (state);

  width = GST_VIDEO_INFO_WIDTH (&state->info);
  height = GST_VIDEO_INFO_HEIGHT (&state->info);
  fps_n = GST_VIDEO_INFO_FPS_N (&state->info);
  fps_d = GST_VIDEO_INFO_FPS_D (&state->info);

  /* Tear down any previous encoder instance before creating a new one. */
  if (openh264enc->encoder != NULL) {
    openh264enc->encoder->Uninitialize ();
    WelsDestroySVCEncoder (openh264enc->encoder);
    openh264enc->encoder = NULL;
  }
  WelsCreateSVCEncoder (&openh264enc->encoder);
  /* Silence the library's default (verbose) logging. */
  openh264enc->encoder->SetOption (ENCODER_OPTION_TRACE_LEVEL, &trace_level);

  openh264enc->encoder->GetDefaultParams (&enc_params);

  /* Map element properties onto the OpenH264 extended parameter set;
   * single spatial/temporal layer only. */
  enc_params.iUsageType = openh264enc->usage_type;
  enc_params.iPicWidth = width;
  enc_params.iPicHeight = height;
  enc_params.iTargetBitrate = openh264enc->bitrate;
  enc_params.iMaxBitrate = openh264enc->max_bitrate;
  enc_params.iRCMode = openh264enc->rate_control;
  enc_params.iTemporalLayerNum = 1;
  enc_params.iSpatialLayerNum = 1;
  enc_params.iLtrMarkPeriod = 30;
  enc_params.iMultipleThreadIdc = openh264enc->multi_thread;
  enc_params.bEnableDenoise = openh264enc->enable_denoise;
  enc_params.iComplexityMode = openh264enc->complexity;
  enc_params.uiIntraPeriod = openh264enc->gop_size;
  enc_params.bEnableBackgroundDetection = openh264enc->background_detection;
  enc_params.bEnableAdaptiveQuant = openh264enc->adaptive_quantization;
  enc_params.bEnableSceneChangeDetect = openh264enc->scene_change_detection;
  enc_params.bEnableFrameSkip = openh264enc->enable_frame_skip;
  enc_params.bEnableLongTermReference = 0;
#if OPENH264_MINOR >= 4
  enc_params.eSpsPpsIdStrategy = CONSTANT_ID;
#else
  enc_params.bEnableSpsPpsIdAddition = 0;
#endif
  enc_params.bPrefixNalAddingCtrl = 0;
  /* NOTE(review): assumes fps_d != 0; caps negotiation normally guarantees
   * a valid framerate here — confirm against the sink pad template. */
  enc_params.fMaxFrameRate = fps_n * 1.0 / fps_d;
  enc_params.iLoopFilterDisableIdc = openh264enc->deblocking_mode;
  enc_params.sSpatialLayers[0].uiProfileIdc = PRO_BASELINE;
  enc_params.sSpatialLayers[0].iVideoWidth = enc_params.iPicWidth;
  enc_params.sSpatialLayers[0].iVideoHeight = enc_params.iPicHeight;
  enc_params.sSpatialLayers[0].fFrameRate = fps_n * 1.0 / fps_d;
  enc_params.sSpatialLayers[0].iSpatialBitrate = enc_params.iTargetBitrate;
  enc_params.sSpatialLayers[0].iMaxSpatialBitrate = enc_params.iMaxBitrate;

  if (openh264enc->slice_mode == GST_OPENH264_SLICE_MODE_N_SLICES) {
    /* Fix: braces added — the n_slices assignment was indented as if it
     * belonged to the else branch, but it must (and did) run for both
     * cases of this inner if. */
    if (openh264enc->num_slices == 1) {
      slice_mode = SM_SINGLE_SLICE;
    } else {
      slice_mode = SM_FIXEDSLCNUM_SLICE;
    }
    n_slices = openh264enc->num_slices;
  } else if (openh264enc->slice_mode == GST_OPENH264_SLICE_MODE_AUTO) {
#if OPENH264_MAJOR == 1 && OPENH264_MINOR < 6
    slice_mode = SM_AUTO_SLICE;
#else
    /* Since 1.6 auto mode is expressed as "fixed number" with 0 slices. */
    slice_mode = SM_FIXEDSLCNUM_SLICE;
    n_slices = 0;
#endif
  } else {
    GST_ERROR_OBJECT (openh264enc, "unexpected slice mode %d",
        openh264enc->slice_mode);
    slice_mode = SM_SINGLE_SLICE;
  }

#if OPENH264_MAJOR == 1 && OPENH264_MINOR < 6
  enc_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = slice_mode;
  enc_params.sSpatialLayers[0].sSliceCfg.sSliceArgument.uiSliceNum = n_slices;
#else
  enc_params.sSpatialLayers[0].sSliceArgument.uiSliceMode = slice_mode;
  enc_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = n_slices;
#endif

  /* NOTE(review): integer division rounded up by 1 — presumably a ceiling
   * approximation of the frame rate; confirm this matches how ->framerate
   * is consumed elsewhere. */
  openh264enc->framerate = (1 + fps_n / fps_d);

  ret = openh264enc->encoder->InitializeExt (&enc_params);

  if (ret != cmResultSuccess) {
    GST_ERROR_OBJECT (openh264enc, "failed to initialize encoder");
    return FALSE;
  }

  openh264enc->encoder->SetOption (ENCODER_OPTION_DATAFORMAT, &video_format);

  outcaps =
      gst_caps_copy (gst_static_pad_template_get_caps
      (&gst_openh264enc_src_template));

  output_state = gst_video_encoder_set_output_state (encoder, outcaps, state);
  gst_video_codec_state_unref (output_state);

  return gst_video_encoder_negotiate (encoder);
}
/* Accept new sink caps and, if codec_data is present, feed the three
 * concatenated Theora headers (each prefixed with a 16-bit big-endian
 * length) to the decoder.  Always returns TRUE: header parsing is
 * best-effort and decode errors are deliberately ignored. */
static gboolean
theora_dec_set_format (GstVideoDecoder * bdec, GstVideoCodecState * state)
{
  GstTheoraDec *dec;

  dec = GST_THEORA_DEC (bdec);

  /* Keep a copy of the input state */
  if (dec->input_state)
    gst_video_codec_state_unref (dec->input_state);
  dec->input_state = gst_video_codec_state_ref (state);

  /* FIXME : Interesting, we always accept any kind of caps ? */
  if (state->codec_data) {
    GstBuffer *buffer;
    GstMapInfo minfo;
    guint8 *data;
    guint size;
    guint offset;

    buffer = state->codec_data;

    /* Fix: the map result was previously ignored; on failure the code
     * would have read an uninitialized GstMapInfo. */
    if (!gst_buffer_map (buffer, &minfo, GST_MAP_READ)) {
      GST_WARNING_OBJECT (dec, "Failed to map codec_data buffer");
      return TRUE;
    }

    offset = 0;
    size = minfo.size;
    data = (guint8 *) minfo.data;

    /* Each header packet: 2-byte big-endian length, then payload. */
    while (size > 2) {
      guint psize;
      GstBuffer *buf;

      psize = (data[0] << 8) | data[1];
      /* skip header */
      data += 2;
      size -= 2;
      offset += 2;

      /* make sure we don't read too much */
      psize = MIN (psize, size);

      buf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, psize);

      /* first buffer is a discont buffer */
      if (offset == 2)
        GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);

      /* now feed it to the decoder we can ignore the error */
      theora_dec_decode_buffer (dec, buf, NULL);
      gst_buffer_unref (buf);

      /* skip the data */
      size -= psize;
      data += psize;
      offset += psize;
    }

    gst_buffer_unmap (buffer, &minfo);
  }

  GST_DEBUG_OBJECT (dec, "Done");

  return TRUE;
}
Beispiel #30
0
/* Configure the MFC hardware decoder for the negotiated input caps.
 * Reuses the existing context when the caps are compatible; otherwise
 * tears it down and creates a fresh one for the selected codec. */
static gboolean
gst_mfc_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
{
  GstMFCDec *self = GST_MFC_DEC (decoder);
  GstStructure *s;
  int codec_type = -1;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state
      && gst_caps_can_intersect (self->input_state->caps, state->caps)) {
    GST_DEBUG_OBJECT (self, "Compatible caps");
    goto done;
  }

  s = gst_caps_get_structure (state->caps, 0);

  if (self->context) {
    mfc_dec_destroy (self->context);
    self->context = NULL;
  }
  self->initialized = FALSE;

  /* Select the MFC codec type first, then create the context once —
   * the original duplicated the create/error block per caps branch. */
  if (gst_structure_has_name (s, "video/x-h264")) {
    codec_type = CODEC_TYPE_H264;
  } else if (gst_structure_has_name (s, "video/mpeg")) {
    gint mpegversion;

    if (!gst_structure_get_int (s, "mpegversion", &mpegversion))
      return FALSE;
    if (mpegversion != 1 && mpegversion != 2 && mpegversion != 4)
      return FALSE;

    /* MPEG-1 streams are handled by the MPEG-2 hardware codec. */
    if (mpegversion == 1 || mpegversion == 2)
      codec_type = CODEC_TYPE_MPEG2;
    else
      codec_type = CODEC_TYPE_MPEG4;
  } else if (gst_structure_has_name (s, "video/x-h263")) {
    codec_type = CODEC_TYPE_H263;
  } else {
    /* Caps outside the pad template — should be unreachable. */
    g_return_val_if_reached (FALSE);
  }

  self->context = mfc_dec_create (codec_type);
  if (!self->context) {
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize MFC decoder context"), (NULL));
    return FALSE;
  }

  if (mfc_dec_init_input (self->context, 1) < 0) {
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize MFC decoder context input"), (NULL));
    return FALSE;
  }

  gst_buffer_replace (&self->codec_data, state->codec_data);

done:
  /* Always take a fresh reference on the new input state. */
  if (self->input_state)
    gst_video_codec_state_unref (self->input_state);
  self->input_state = gst_video_codec_state_ref (state);

  return TRUE;
}