static gboolean
fill_slices (GstVaapiEncoderMpeg2 * encoder, GstVaapiEncPicture * picture)
{
  VAEncSliceParameterBufferMPEG2 *slice_param;
  GstVaapiEncSlice *slice;
  guint width_in_mbs, height_in_mbs;
  guint i_slice;

  g_assert (picture);

  width_in_mbs = (GST_VAAPI_ENCODER_WIDTH (encoder) + 15) / 16;
  height_in_mbs = (GST_VAAPI_ENCODER_HEIGHT (encoder) + 15) / 16;

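  /* Generate one slice per row of macroblocks */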
  for (i_slice = 0; i_slice < height_in_mbs; ++i_slice) {
    slice = GST_VAAPI_ENC_SLICE_NEW (MPEG2, encoder);
    g_assert (slice && slice->param_id != VA_INVALID_ID);
    slice_param = slice->param;

    memset (slice_param, 0, sizeof (VAEncSliceParameterBufferMPEG2));

    slice_param->macroblock_address = i_slice * width_in_mbs;
    slice_param->num_macroblocks = width_in_mbs;
    slice_param->is_intra_slice = (picture->type == GST_VAAPI_PICTURE_TYPE_I);
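    /* Constant quantiser: map the configured cqp to a quantiser_scale_code */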
    slice_param->quantiser_scale_code = encoder->cqp / 2;

    gst_vaapi_enc_picture_add_slice (picture, slice);
    gst_vaapi_codec_object_replace (&slice, NULL);
  }
  return TRUE;
}
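
/* Fills in the VA picture parameter buffer for JPEG encoding */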
static gboolean
fill_picture (GstVaapiEncoderJpeg * encoder,
    GstVaapiEncPicture * picture,
    GstVaapiCodedBuffer * codedbuf, GstVaapiSurfaceProxy * surface)
{
  VAEncPictureParameterBufferJPEG *const pic_param = picture->param;

  memset (pic_param, 0, sizeof (VAEncPictureParameterBufferJPEG));

  pic_param->reconstructed_picture =
      GST_VAAPI_SURFACE_PROXY_SURFACE_ID (surface);
  pic_param->picture_width = GST_VAAPI_ENCODER_WIDTH (encoder);
  pic_param->picture_height = GST_VAAPI_ENCODER_HEIGHT (encoder);
  pic_param->coded_buf = GST_VAAPI_OBJECT_ID (codedbuf);

  pic_param->pic_flags.bits.profile = 0;        /* Profile = Baseline */
  pic_param->pic_flags.bits.progressive = 0;    /* Sequential encoding */
  pic_param->pic_flags.bits.huffman = 1;        /* Uses Huffman coding */
  pic_param->pic_flags.bits.interleaved = 0;    /* Input format is non interleaved (YUV) */
  pic_param->pic_flags.bits.differential = 0;   /* non-Differential Encoding */
  pic_param->sample_bit_depth = 8;
  pic_param->num_scan = 1;
  pic_param->num_components = encoder->n_components;
  pic_param->quality = encoder->quality;
  return TRUE;
}

/* Updates the video context info from the current encoder configuration */
static gboolean
set_context_info (GstVaapiEncoder * encoder)
{
  GstVaapiContextInfo *const cip = &encoder->context_info;
  GstVaapiConfigInfoEncoder *const config = &cip->config.encoder;
  const GstVideoFormat format =
      GST_VIDEO_INFO_FORMAT (GST_VAAPI_ENCODER_VIDEO_INFO (encoder));
  const GstVaapiEncoderClassData *const cdata =
      GST_VAAPI_ENCODER_GET_CLASS (encoder)->class_data;

  cip->usage = GST_VAAPI_CONTEXT_USAGE_ENCODE;
  cip->profile = encoder->profile;
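  /* JPEG is encoded at picture level; all other codecs use slice-level encoding */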
  if (cdata->codec == GST_VAAPI_CODEC_JPEG)
    cip->entrypoint = GST_VAAPI_ENTRYPOINT_PICTURE_ENCODE;
  else
    cip->entrypoint = GST_VAAPI_ENTRYPOINT_SLICE_ENCODE;
  cip->chroma_type = gst_vaapi_video_format_get_chroma_type (format);
  cip->width = GST_VAAPI_ENCODER_WIDTH (encoder);
  cip->height = GST_VAAPI_ENCODER_HEIGHT (encoder);
  cip->ref_frames = encoder->num_ref_frames;

  if (!cip->chroma_type && (format != GST_VIDEO_FORMAT_ENCODED))
    goto error_unsupported_format;

  if (cip->chroma_type != GST_VAAPI_CHROMA_TYPE_YUV420 &&
      format != GST_VIDEO_FORMAT_ENCODED) {
    GST_ERROR ("We are only supporting YUV:4:2:0 for encoding,"
        "please try to use vaapipostproc to convert the input format!");
    goto error_unsupported_format;
  }

  memset (config, 0, sizeof (*config));
  config->rc_mode = GST_VAAPI_ENCODER_RATE_CONTROL (encoder);
  config->packed_headers = get_packed_headers (encoder);
  return TRUE;

  /* ERRORS */
error_unsupported_format:
  {
    GST_ERROR ("failed to determine chroma type for format %s",
        gst_vaapi_video_format_to_string (format));
    return FALSE;
  }
}
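
/* Fills in the VA sequence parameter buffer for VP8 encoding */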
static gboolean
fill_sequence (GstVaapiEncoderVP8 * encoder, GstVaapiEncSequence * sequence)
{
  GstVaapiEncoder *const base_encoder = GST_VAAPI_ENCODER_CAST (encoder);
  VAEncSequenceParameterBufferVP8 *const seq_param = sequence->param;

  memset (seq_param, 0, sizeof (VAEncSequenceParameterBufferVP8));

  seq_param->frame_width = GST_VAAPI_ENCODER_WIDTH (encoder);
  seq_param->frame_height = GST_VAAPI_ENCODER_HEIGHT (encoder);

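  /* bits_per_second only applies to bitrate-driven modes (CBR/VBR); bitrate is in kbps */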
  if (GST_VAAPI_ENCODER_RATE_CONTROL (encoder) == GST_VAAPI_RATECONTROL_CBR ||
      GST_VAAPI_ENCODER_RATE_CONTROL (encoder) == GST_VAAPI_RATECONTROL_VBR)
    seq_param->bits_per_second = base_encoder->bitrate * 1000;

  seq_param->intra_period = base_encoder->keyframe_period;

  return TRUE;
}
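
/* Fills in the VA sequence parameter buffer for MPEG-2 encoding */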
static gboolean
fill_sequence (GstVaapiEncoderMpeg2 * encoder, GstVaapiEncSequence * sequence)
{
  GstVaapiEncoder *const base_encoder = GST_VAAPI_ENCODER_CAST (encoder);
  VAEncSequenceParameterBufferMPEG2 *const seq_param = sequence->param;

  memset (seq_param, 0, sizeof (VAEncSequenceParameterBufferMPEG2));

  seq_param->intra_period = base_encoder->keyframe_period;
  seq_param->ip_period = encoder->ip_period;
  seq_param->picture_width = GST_VAAPI_ENCODER_WIDTH (encoder);
  seq_param->picture_height = GST_VAAPI_ENCODER_HEIGHT (encoder);

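  /* The encoder bitrate property is in kbps; VA expects bits per second */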
  if (base_encoder->bitrate > 0)
    seq_param->bits_per_second = base_encoder->bitrate * 1000;
  else
    seq_param->bits_per_second = 0;

  if (GST_VAAPI_ENCODER_FPS_D (encoder))
    seq_param->frame_rate =
        GST_VAAPI_ENCODER_FPS_N (encoder) / GST_VAAPI_ENCODER_FPS_D (encoder);
  else
    seq_param->frame_rate = 0;

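  /* aspect_ratio_information 1 = square pixels (1:1 sample aspect ratio) */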
  seq_param->aspect_ratio_information = 1;
  seq_param->vbv_buffer_size = 3;       /* B = 16 * 1024 * vbv_buffer_size */

  seq_param->sequence_extension.bits.profile_and_level_indication =
      (encoder->profile_idc << 4) | encoder->level_idc;
  seq_param->sequence_extension.bits.progressive_sequence = 1;  /* progressive frame-pictures */
  seq_param->sequence_extension.bits.chroma_format =
      gst_vaapi_utils_mpeg2_get_chroma_format_idc
      (GST_VAAPI_CHROMA_TYPE_YUV420);
  seq_param->sequence_extension.bits.low_delay = 0;     /* FIXME */
  seq_param->sequence_extension.bits.frame_rate_extension_n = 0;        /* FIXME */
  seq_param->sequence_extension.bits.frame_rate_extension_d = 0;

  seq_param->gop_header.bits.time_code = (1 << 12);     /* bit12: marker_bit */
  seq_param->gop_header.bits.closed_gop = 0;
  seq_param->gop_header.bits.broken_link = 0;

  return TRUE;
}
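
/* Ensures a bitrate is set: derives a default for CBR from the frame size
 * and frame rate; other rate-control modes reset it to 0 */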
static gboolean
ensure_bitrate (GstVaapiEncoderMpeg2 * encoder)
{
  GstVaapiEncoder *const base_encoder = GST_VAAPI_ENCODER_CAST (encoder);

  /* Default compression: 64 bits per macroblock */
  switch (GST_VAAPI_ENCODER_RATE_CONTROL (encoder)) {
    case GST_VAAPI_RATECONTROL_CBR:
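      /* (width * height / 256) macroblocks * 64 bits * fps, converted to kbps */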
      if (!base_encoder->bitrate)
        base_encoder->bitrate =
            gst_util_uint64_scale (GST_VAAPI_ENCODER_WIDTH (encoder) *
            GST_VAAPI_ENCODER_HEIGHT (encoder),
            GST_VAAPI_ENCODER_FPS_N (encoder),
            GST_VAAPI_ENCODER_FPS_D (encoder)) / 4 / 1000;
      break;
    default:
      base_encoder->bitrate = 0;
      break;
  }
  return TRUE;
}