Example #1
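A sink-pad query handler for the VA-API video decoder. CAPS queries are answered with the decoder's supported caps, intersected with the filter if one is set; CONTEXT queries are delegated to the VA-API context helper; everything else is forwarded to the parent class or, on GStreamer older than 1.4, to the query function saved by the plugin base class.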
static gboolean
gst_vaapidecode_sink_query (GstVideoDecoder * vdec, GstQuery * query)
{
  gboolean ret = TRUE;
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiPluginBase *const plugin = GST_VAAPI_PLUGIN_BASE (decode);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *caps, *filter = NULL;
      GstPad *pad = GST_VIDEO_DECODER_SINK_PAD (vdec);

      gst_query_parse_caps (query, &filter);
      caps = gst_vaapidecode_get_caps (pad);

      if (filter) {
        GstCaps *tmp = caps;
        caps = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (tmp);
      }

      gst_query_set_caps_result (query, caps);
      gst_caps_unref (caps);
      break;
    }
    case GST_QUERY_CONTEXT:{
      ret = gst_vaapi_handle_context_query (query, plugin->display);
      break;
    }
    default:{
#if GST_CHECK_VERSION(1,4,0)
      ret = GST_VIDEO_DECODER_CLASS (gst_vaapidecode_parent_class)->sink_query
          (vdec, query);
#else
      GstPad *pad = GST_VIDEO_DECODER_SINK_PAD (vdec);
      GstObject *parent = gst_pad_get_parent (pad);
      ret = plugin->sinkpad_query (pad, parent, query);
      if (parent)
        gst_object_unref (parent);
#endif
      break;
    }
  }

  return ret;
}
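For a handler like this to run, it has to be installed as the sink_query vfunc in class_init. A minimal sketch with hypothetical element names (gst_mydec_sink_query stands in for an override like the one above):

static void
gst_mydec_class_init (GstMyDecClass * klass)
{
  GstVideoDecoderClass *vdec_class = GST_VIDEO_DECODER_CLASS (klass);

  /* route sink-pad queries through the custom handler */
  vdec_class->sink_query = GST_DEBUG_FUNCPTR (gst_mydec_sink_query);
}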
Example #2
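Instance initializer for the RSVG decoder: the input is not packetized, so the base class parses frame boundaries, and accept-caps queries are answered from the sink pad template.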
static void
gst_rsvg_dec_init (GstRsvgDec * rsvg)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (rsvg);
  gst_video_decoder_set_packetized (decoder, FALSE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (rsvg), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (rsvg));
}
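GST_PAD_SET_ACCEPT_TEMPLATE only sets a flag on the pad: with it, the default accept-caps handling checks incoming caps against the pad template instead of issuing a recursive CAPS query, which is cheaper and sufficient for decoders whose template fully describes what they accept. Assuming current GStreamer headers, the macro boils down to a one-line flag set (pad being the decoder's sink pad):

/* what GST_PAD_SET_ACCEPT_TEMPLATE (pad) expands to */
GST_OBJECT_FLAG_SET (pad, GST_PAD_FLAG_ACCEPT_TEMPLATE);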
Example #3
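Instance initializer for the PNM decoder: it resets the internal state with gst_pnmdec_flush() and enables template-based accept-caps handling on the sink pad.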
static void
gst_pnmdec_init (GstPnmdec * s)
{
  /* Initialize decoder */
  s->buf = NULL;
  gst_pnmdec_flush (s);

  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (s), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (s));
}
Example #4
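Instance initializer for the mpeg2dec element: the input is packetized, caps must be negotiated before data flows, and accept-caps is answered from the sink pad template.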
static void
gst_mpeg2dec_init (GstMpeg2dec * mpeg2dec)
{
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (mpeg2dec), TRUE);
  gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (mpeg2dec), TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (mpeg2dec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (mpeg2dec));

  /* initialize the mpeg2dec acceleration */
}
Example #5
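Instance initializer for the Daala decoder. As the comment notes, the input is packetized but deliberately not marked as such, so the base class parses the data and marks keyframes.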
static void
gst_daala_dec_init (GstDaalaDec * dec)
{
  /* input is packetized,
   * but is not marked that way so data gets parsed and keyframes marked */
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (dec), FALSE);
  gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (dec), TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (dec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
}
Example #6
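Instance initializer for the libde265 decoder: it sets the default properties, resets the decoder state, and marks the input as packetized.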
static void
gst_libde265_dec_init (GstLibde265Dec * dec)
{
  dec->format = DEFAULT_FORMAT;
  dec->max_threads = DEFAULT_MAX_THREADS;
  dec->length_size = 4;
  _gst_libde265_dec_reset_decoder (dec);
  gst_video_decoder_set_packetized (GST_VIDEO_DECODER (dec), TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (dec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
}
Example #7
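A parse implementation for the VA-API decoder. Each parsed unit grows the current frame via gst_video_decoder_add_to_frame(), and a complete frame is submitted with gst_video_decoder_have_frame(). Unsupported-codec errors map to GST_FLOW_NOT_SUPPORTED, while other parse errors keep parsing and request a key unit from upstream.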
static GstFlowReturn
gst_vaapidecode_parse_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiDecoderStatus status;
  GstFlowReturn ret;
  guint got_unit_size;
  gboolean got_frame;

  status = gst_vaapi_decoder_parse (decode->decoder, frame,
      adapter, at_eos, &got_unit_size, &got_frame);

  switch (status) {
    case GST_VAAPI_DECODER_STATUS_SUCCESS:
      if (got_unit_size > 0) {
        gst_video_decoder_add_to_frame (vdec, got_unit_size);
        decode->current_frame_size += got_unit_size;
      }
      if (got_frame) {
        ret = gst_video_decoder_have_frame (vdec);
        decode->current_frame_size = 0;
      } else
        ret = GST_VAAPI_DECODE_FLOW_PARSE_DATA;
      break;
    case GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA:
      ret = GST_VIDEO_DECODER_FLOW_NEED_DATA;
      break;
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
    case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
      GST_WARNING ("parse error %d", status);
      ret = GST_FLOW_NOT_SUPPORTED;
      decode->current_frame_size = 0;
      break;
    default:
      GST_WARNING ("parse error %d", status);
      /* just keep parsing, the decoder should have flushed the broken unit */
      ret = GST_VAAPI_DECODE_FLOW_PARSE_DATA;
      decode->current_frame_size = 0;

      GST_INFO ("requesting upstream a key unit");
      gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decode),
          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
              FALSE, 0));
      break;
  }
  return ret;
}
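GST_VAAPI_DECODE_FLOW_PARSE_DATA is a plugin-private flow return signaling that input was consumed but no complete frame is available yet. Custom returns like this are carved out of GStreamer's custom-success range; a plausible definition, assuming the plugin's private header (the exact constant is an assumption):

/* hypothetical plugin-private flow return; any custom-success
 * value works as long as the element handles it consistently */
#define GST_VAAPI_DECODE_FLOW_PARSE_DATA GST_FLOW_CUSTOM_SUCCESS_2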
Example #8
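Instance initializer for the OpenJPEG decoder: packetized input, caps required before data flows, template-based accept-caps, and default libopenjpeg decoding parameters.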
static void
gst_openjpeg_dec_init (GstOpenJPEGDec * self)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) self;

  gst_video_decoder_set_packetized (decoder, TRUE);
  gst_video_decoder_set_needs_format (decoder, TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (self), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (self));
  opj_set_default_decoder_parameters (&self->params);
#ifdef HAVE_OPENJPEG_1
  self->params.cp_limit_decoding = NO_LIMITATION;
#endif
}
Example #9
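Instance initializer for the PNG decoder: it clears the libpng handles and decoding state, then enables template-based accept-caps handling.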
static void
gst_pngdec_init (GstPngDec * pngdec)
{
  pngdec->png = NULL;
  pngdec->info = NULL;
  pngdec->endinfo = NULL;

  pngdec->color_type = -1;

  pngdec->image_ready = FALSE;
  pngdec->read_data = 0;

  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
      (pngdec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (pngdec));
}
Example #10
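Instance initializer for the VP9 decoder: packetized input, default post-processing settings, caps required before data flows, and template-based accept-caps.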
static void
gst_vp9_dec_init (GstVP9Dec * gst_vp9_dec)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) gst_vp9_dec;

  GST_DEBUG_OBJECT (gst_vp9_dec, "gst_vp9_dec_init");
  gst_video_decoder_set_packetized (decoder, TRUE);
  gst_vp9_dec->post_processing = DEFAULT_POST_PROCESSING;
  gst_vp9_dec->post_processing_flags = DEFAULT_POST_PROCESSING_FLAGS;
  gst_vp9_dec->deblocking_level = DEFAULT_DEBLOCKING_LEVEL;
  gst_vp9_dec->noise_level = DEFAULT_NOISE_LEVEL;

  gst_video_decoder_set_needs_format (decoder, TRUE);
  gst_video_decoder_set_use_default_pad_acceptcaps (decoder, TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (decoder));
}
Example #11
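Instance initializer for the ffmpeg/libav audio decoder: it allocates the AVCodecContext and AVFrame and configures the GstAudioDecoder base class (template-based accept-caps, drainable, needs format). Since this element derives from GstAudioDecoder, the sink pad comes from the audio base class macro.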
static void
gst_ffmpegauddec_init (GstFFMpegAudDec * ffmpegdec)
{
  GstFFMpegAudDecClass *klass =
      (GstFFMpegAudDecClass *) G_OBJECT_GET_CLASS (ffmpegdec);

  /* some ffmpeg data */
  ffmpegdec->context = avcodec_alloc_context3 (klass->in_plugin);
  ffmpegdec->context->opaque = ffmpegdec;
  ffmpegdec->opened = FALSE;

  ffmpegdec->frame = av_frame_alloc ();

  /* this element derives from GstAudioDecoder, so the sink pad
   * must come from the audio base class macro */
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_AUDIO_DECODER_SINK_PAD (ffmpegdec));
  gst_audio_decoder_set_use_default_pad_acceptcaps (GST_AUDIO_DECODER_CAST
      (ffmpegdec), TRUE);

  gst_audio_decoder_set_drainable (GST_AUDIO_DECODER (ffmpegdec), TRUE);
  gst_audio_decoder_set_needs_format (GST_AUDIO_DECODER (ffmpegdec), TRUE);
}
Example #12
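A sink-pad query handler for the V4L2 video decoder: CAPS queries are answered from the caps probed from the device, falling back to the pad template, intersected with the filter; everything else goes to the parent class.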
static gboolean
gst_v4l2_video_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
  gboolean ret = TRUE;
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CAPS:{
      GstCaps *filter, *result = NULL;
      GstPad *pad = GST_VIDEO_DECODER_SINK_PAD (decoder);
      gst_query_parse_caps (query, &filter);

      if (self->probed_sinkcaps)
        result = gst_caps_ref (self->probed_sinkcaps);
      else
        result = gst_pad_get_pad_template_caps (pad);

      if (filter) {
        GstCaps *tmp = result;
        result =
            gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (tmp);
      }

      GST_DEBUG_OBJECT (self, "Returning sink caps %" GST_PTR_FORMAT, result);

      gst_query_set_caps_result (query, result);
      gst_caps_unref (result);
      break;
    }

    default:
      ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
      break;
  }

  return ret;
}
Example #13
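A handle_frame implementation for the VA-API decoder. It loops on gst_vaapi_decoder_decode(); when no surface is free, it first pushes out all decoded frames, then waits on a condition variable until a surface is released. Decoder errors map to flow returns, and generic decode errors report a decoder error and request a key unit from upstream.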
static GstFlowReturn
gst_vaapidecode_handle_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiDecoderStatus status;
  GstFlowReturn ret;

  if (!decode->input_state)
    goto not_negotiated;

  /* Decode current frame */
  for (;;) {
    status = gst_vaapi_decoder_decode (decode->decoder, frame);
    if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE) {
      /* Make sure that there are no decoded frames waiting in the
         output queue. */
      ret = gst_vaapidecode_push_all_decoded_frames (decode);
      if (ret != GST_FLOW_OK)
        goto error_push_all_decoded_frames;

      g_mutex_lock (&decode->surface_ready_mutex);
      if (gst_vaapi_decoder_check_status (decode->decoder) ==
          GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE)
        g_cond_wait (&decode->surface_ready, &decode->surface_ready_mutex);
      g_mutex_unlock (&decode->surface_ready_mutex);
      continue;
    }
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
      goto error_decode;
    break;
  }

  /* Note that gst_vaapi_decoder_decode cannot return success without
     completing the decode and pushing all decoded frames into the output
     queue */
  return gst_vaapidecode_push_all_decoded_frames (decode);

  /* ERRORS */
error_push_all_decoded_frames:
  {
    GST_ERROR ("push loop error while decoding %d", ret);
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
error_decode:
  {
    GST_ERROR ("decode error %d", status);
    switch (status) {
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC:
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE:
      case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
        ret = GST_FLOW_NOT_SUPPORTED;
        break;
      default:
        GST_VIDEO_DECODER_ERROR (vdec, 1, STREAM, DECODE, ("Decoding error"),
            ("Decode error %d", status), ret);
        GST_INFO ("requesting upstream a key unit");
        gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decode),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                FALSE, 0));
        ret = GST_FLOW_OK;
        break;
    }
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
not_negotiated:
  {
    GST_ERROR_OBJECT (decode, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    gst_video_decoder_drop_frame (vdec, frame);
    return ret;
  }
}
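Example #14
A handle_frame implementation for the OpenH264 decoder: it decodes the input with DecodeFrame2(), requests an upstream key unit when parameter sets are missing or decoding fails, pairs the output with the oldest pending frame, renegotiates when the picture size changes, and copies the decoded planes row by row into the output buffer.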
static GstFlowReturn gst_openh264dec_handle_frame(GstVideoDecoder *decoder, GstVideoCodecFrame *frame)
{
    GstOpenh264Dec *openh264dec = GST_OPENH264DEC(decoder);
    GstMapInfo map_info;
    GstVideoCodecState *state;
    SBufferInfo dst_buf_info;
    DECODING_STATE ret;
    guint8 *yuvdata[3];
    GstFlowReturn flow_status;
    GstVideoFrame video_frame;
    guint actual_width, actual_height;
    guint i;
    guint8 *p;
    guint row_stride, component_width, component_height, src_width, row;

    if (frame) {
        if (!gst_buffer_map(frame->input_buffer, &map_info, GST_MAP_READ)) {
            GST_ERROR_OBJECT(openh264dec, "Cannot map input buffer!");
            return GST_FLOW_ERROR;
        }

        GST_LOG_OBJECT(openh264dec, "handle frame, %d", map_info.size > 4 ? map_info.data[4] & 0x1f : -1);

        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(map_info.data, map_info.size, yuvdata, &dst_buf_info);

        if (ret == dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
        }

        if (ret != dsErrorFree && ret != dsNoParamSets) {
            GST_DEBUG_OBJECT(openh264dec, "Requesting a key unit");
            gst_pad_push_event(GST_VIDEO_DECODER_SINK_PAD(decoder),
                               gst_video_event_new_upstream_force_key_unit(GST_CLOCK_TIME_NONE, FALSE, 0));
            GST_LOG_OBJECT(openh264dec, "error decoding nal, return code: %d", ret);
        }

        gst_buffer_unmap(frame->input_buffer, &map_info);
        gst_video_codec_frame_unref (frame);
        frame = NULL;
    } else {
        memset (&dst_buf_info, 0, sizeof (SBufferInfo));
        ret = openh264dec->priv->decoder->DecodeFrame2(NULL, 0, yuvdata, &dst_buf_info);
        if (ret != dsErrorFree)
            return GST_FLOW_EOS;
    }

    /* FIXME: openh264 has no way for us to get a connection
     * between the input and output frames, we just have to
     * guess based on the input. Fortunately openh264 can
     * only do baseline profile. */
    frame = gst_video_decoder_get_oldest_frame (decoder);
    if (!frame) {
      /* Can only happen in finish() */
      return GST_FLOW_EOS;
    }

    /* No output available yet; frame is non-NULL here, so release
     * the extra ref taken above and wait for more data */
    if (dst_buf_info.iBufferStatus != 1) {
        gst_video_codec_frame_unref (frame);
        return GST_FLOW_OK;
    }

    actual_width  = dst_buf_info.UsrData.sSystemBuffer.iWidth;
    actual_height = dst_buf_info.UsrData.sSystemBuffer.iHeight;

    if (!gst_pad_has_current_caps (GST_VIDEO_DECODER_SRC_PAD (openh264dec))
        || actual_width != openh264dec->priv->width
        || actual_height != openh264dec->priv->height) {
        state = gst_video_decoder_set_output_state(decoder,
            GST_VIDEO_FORMAT_I420,
            actual_width,
            actual_height,
            openh264dec->priv->input_state);
        openh264dec->priv->width = actual_width;
        openh264dec->priv->height = actual_height;

        if (!gst_video_decoder_negotiate(decoder)) {
            GST_ERROR_OBJECT(openh264dec, "Failed to negotiate with downstream elements");
            gst_video_codec_state_unref (state);
            gst_video_codec_frame_unref (frame);
            return GST_FLOW_NOT_NEGOTIATED;
        }
    } else {
        state = gst_video_decoder_get_output_state(decoder);
    }

    flow_status = gst_video_decoder_allocate_output_frame(decoder, frame);
    if (flow_status != GST_FLOW_OK) {
        gst_video_codec_state_unref (state);
        gst_video_codec_frame_unref (frame);
        return flow_status;
    }

    if (!gst_video_frame_map(&video_frame, &state->info, frame->output_buffer, GST_MAP_WRITE)) {
        GST_ERROR_OBJECT(openh264dec, "Cannot map output buffer!");
        gst_video_codec_state_unref (state);
        gst_video_codec_frame_unref (frame);
        return GST_FLOW_ERROR;
    }

    for (i = 0; i < 3; i++) {
        p = GST_VIDEO_FRAME_COMP_DATA(&video_frame, i);
        row_stride = GST_VIDEO_FRAME_COMP_STRIDE(&video_frame, i);
        component_width = GST_VIDEO_FRAME_COMP_WIDTH(&video_frame, i);
        component_height = GST_VIDEO_FRAME_COMP_HEIGHT(&video_frame, i);
        src_width = i < 1 ? dst_buf_info.UsrData.sSystemBuffer.iStride[0] : dst_buf_info.UsrData.sSystemBuffer.iStride[1];
        for (row = 0; row < component_height; row++) {
            memcpy(p, yuvdata[i], component_width);
            p += row_stride;
            yuvdata[i] += src_width;
        }
    }
    gst_video_codec_state_unref (state);
    gst_video_frame_unmap(&video_frame);

    return gst_video_decoder_finish_frame(decoder, frame);
}