Example #1
static GstFlowReturn
allocate_output_buffer (GstMsdkDec * thiz, GstBuffer ** buffer)
{
  GstFlowReturn flow;
  GstVideoCodecFrame *frame;
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (thiz);

  frame = gst_msdkdec_get_oldest_frame (decoder);
  if (!frame) {
    if (GST_PAD_IS_FLUSHING (decoder->srcpad))
      return GST_FLOW_FLUSHING;
    else
      return GST_FLOW_ERROR;
  }

  if (!frame->output_buffer) {
    flow = gst_video_decoder_allocate_output_frame (decoder, frame);
    if (flow != GST_FLOW_OK) {
      gst_video_codec_frame_unref (frame);
      return flow;
    }
  }

  *buffer = gst_buffer_ref (frame->output_buffer);
  gst_buffer_replace (&frame->output_buffer, NULL);
  gst_video_codec_frame_unref (frame);
  return GST_FLOW_OK;
}
Example #2
/**
 * gst_vaapi_decoder_get_surface:
 * @decoder: a #GstVaapiDecoder
 * @out_proxy_ptr: the next decoded surface as a #GstVaapiSurfaceProxy
 *
 * Flushes encoded buffers to the decoder and returns a decoded
 * surface, if any.
 *
 * On successful return, *@out_proxy_ptr contains the decoded surface
 * as a #GstVaapiSurfaceProxy. The caller owns this object, so
 * gst_vaapi_surface_proxy_unref() shall be called after usage.
 *
 * Return value: a #GstVaapiDecoderStatus
 */
GstVaapiDecoderStatus
gst_vaapi_decoder_get_surface (GstVaapiDecoder * decoder,
    GstVaapiSurfaceProxy ** out_proxy_ptr)
{
  GstVideoCodecFrame *frame;
  GstVaapiDecoderStatus status;

  g_return_val_if_fail (decoder != NULL,
      GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER);
  g_return_val_if_fail (out_proxy_ptr != NULL,
      GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER);

  do {
    frame = pop_frame (decoder, 0);
    while (frame) {
      if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (frame)) {
        GstVaapiSurfaceProxy *const proxy = frame->user_data;
        proxy->timestamp = frame->pts;
        proxy->duration = frame->duration;
        *out_proxy_ptr = gst_vaapi_surface_proxy_ref (proxy);
        gst_video_codec_frame_unref (frame);
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
      }
      gst_video_codec_frame_unref (frame);
      frame = pop_frame (decoder, 0);
    }
    status = decode_step (decoder);
  } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS);

  *out_proxy_ptr = NULL;
  return status;
}
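As the documentation above notes, the caller owns the returned proxy and must release it. A minimal caller sketch of that contract (pump_surfaces is a hypothetical helper, not part of the library):

static void
pump_surfaces (GstVaapiDecoder * decoder)
{
  GstVaapiSurfaceProxy *proxy = NULL;
  GstVaapiDecoderStatus status;

  /* drain decoded surfaces until the decoder needs more data or errors out */
  while ((status = gst_vaapi_decoder_get_surface (decoder, &proxy)) ==
      GST_VAAPI_DECODER_STATUS_SUCCESS) {
    /* ... consume the surface, e.g. read proxy->timestamp ... */
    gst_vaapi_surface_proxy_unref (proxy);      /* caller owns this reference */
  }
}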
Example #3
static GstFlowReturn
theora_push_packet (GstTheoraEnc * enc, ogg_packet * packet)
{
    GstVideoEncoder *benc;
    GstFlowReturn ret;
    GstVideoCodecFrame *frame;

    benc = GST_VIDEO_ENCODER (enc);

    frame = gst_video_encoder_get_oldest_frame (benc);
    if (gst_video_encoder_allocate_output_frame (benc, frame,
            packet->bytes) != GST_FLOW_OK) {
        GST_WARNING_OBJECT (enc, "Could not allocate buffer");
        gst_video_codec_frame_unref (frame);
        ret = GST_FLOW_ERROR;
        goto done;
    }

    gst_buffer_fill (frame->output_buffer, 0, packet->packet, packet->bytes);

    /* the second most significant bit of the first data byte is cleared
     * for keyframes */
    if (packet->bytes > 0 && (packet->packet[0] & 0x40) == 0) {
        GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
    } else {
        GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
    }
    enc->packetno++;

    ret = gst_video_encoder_finish_frame (benc, frame);

done:
    return ret;
}
Example #4
static GstFlowReturn
gst_vaapidecode_push_all_decoded_frames (GstVaapiDecode * decode)
{
  GstVideoDecoder *const vdec = GST_VIDEO_DECODER (decode);
  GstVaapiDecoderStatus status;
  GstVideoCodecFrame *out_frame;
  GstFlowReturn ret;

  for (;;) {
    status = gst_vaapi_decoder_get_frame (decode->decoder, &out_frame);

    switch (status) {
      case GST_VAAPI_DECODER_STATUS_SUCCESS:
        /* GstVaapiDecode's queue adds an extra reference */
        gst_video_codec_frame_unref (out_frame);
        ret = gst_vaapidecode_push_decoded_frame (vdec, out_frame);
        if (ret != GST_FLOW_OK)
          return ret;
        break;
      case GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA:
        return GST_FLOW_OK;
      default:
        GST_VIDEO_DECODER_ERROR (vdec, 1, STREAM, DECODE, ("Decoding failed"),
            ("Unknown decoding error"), ret);
        return ret;
    }
  }
  g_assert_not_reached ();
}
Example #5
static void
gst_vaapidecode_purge (GstVaapiDecode * decode)
{
  GstVaapiDecoderStatus status;

  if (!decode->decoder)
    return;

  status = gst_vaapi_decoder_flush (decode->decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    GST_INFO_OBJECT (decode, "failed to flush decoder (status %d)", status);

  /* Purge all decoded frames as we don't need them (e.g. at flush or close).
   * Releasing the frames is important, otherwise they are never freed. */
  do {
    GstVideoCodecFrame *frame = NULL;

    status =
        gst_vaapi_decoder_get_frame_with_timeout (decode->decoder, &frame, 0);
    if (frame) {
      gst_video_decoder_release_frame (GST_VIDEO_DECODER (decode), frame);
      gst_video_codec_frame_unref (frame);
    }
  } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS);
}
Example #6
static GstFlowReturn
gst_aml_vdec_handle_frame(GstVideoDecoder *dec, GstVideoCodecFrame *frame)
{
	GstAmlVdec *amlvdec = GST_AMLVDEC(dec);
	GstFlowReturn ret;

	GST_DEBUG_OBJECT(dec, "%s %d", __FUNCTION__, __LINE__);

	if (G_UNLIKELY(!frame))
		return GST_FLOW_OK;

	ret = gst_video_decoder_allocate_output_frame(dec, frame);
	if (G_UNLIKELY(ret != GST_FLOW_OK)) {
		GST_ERROR_OBJECT(dec, "failed to allocate output frame");
		ret = GST_FLOW_ERROR;
		gst_video_codec_frame_unref(frame);
		goto done;
	}

	gst_aml_vdec_decode(amlvdec, frame->input_buffer);
	gst_video_decoder_finish_frame(dec, frame);
//	gst_video_codec_frame_unref(frame);
//	gst_video_decoder_release_frame(dec, frame);
	ret = GST_FLOW_OK;
done:
	return ret;
}
Example #7
static GstFlowReturn
gst_msdkdec_finish_task (GstMsdkDec * thiz, MsdkDecTask * task)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (thiz);
  GstFlowReturn flow;
  GstVideoCodecFrame *frame;
  MsdkSurface *surface;
  mfxStatus status;
  GList *l;

  if (G_LIKELY (task->sync_point)) {
    status =
        MFXVideoCORE_SyncOperation (gst_msdk_context_get_session
        (thiz->context), task->sync_point, 300000);
    if (status != MFX_ERR_NONE) {
      GST_ERROR_OBJECT (thiz, "failed to do sync operation");
      return GST_FLOW_ERROR;
    }
  }

  if (G_LIKELY (task->sync_point || (task->surface && task->decode_only))) {
    gboolean decode_only = task->decode_only;

    frame = gst_msdkdec_get_oldest_frame (decoder);

    l = g_list_find_custom (thiz->decoded_msdk_surfaces, task->surface,
        _find_msdk_surface);
    if (l) {
      surface = l->data;
    } else {
      GST_ERROR_OBJECT (thiz, "Couldn't find the cached MSDK surface");
      return GST_FLOW_ERROR;
    }

    if (G_LIKELY (frame)) {
      if (G_LIKELY (surface->copy.buffer == NULL)) {
        frame->output_buffer = gst_buffer_ref (surface->buf);
      } else {
        gst_video_frame_copy (&surface->copy, &surface->data);
        frame->output_buffer = gst_buffer_ref (surface->copy.buffer);
      }
    }

    free_surface (thiz, surface);
    task->sync_point = NULL;
    task->surface = NULL;
    task->decode_only = FALSE;

    if (!frame)
      return GST_FLOW_FLUSHING;
    /* drop the ref taken by gst_msdkdec_get_oldest_frame(); the decoder
     * itself still holds a reference until finish_frame() releases it */
    gst_video_codec_frame_unref (frame);

    if (decode_only)
      GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY (frame);
    flow = gst_video_decoder_finish_frame (decoder, frame);
    return flow;
  }
  return GST_FLOW_OK;
}
Example #8
static GstFlowReturn
gst_mfc_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstMFCDec *self = GST_MFC_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if ((ret = gst_mfc_dec_queue_input (self, frame)) != GST_FLOW_OK) {
    gst_video_codec_frame_unref (frame);
    return ret;
  }

  gst_video_codec_frame_unref (frame);

  return gst_mfc_dec_dequeue_output (self);
}
Example #9
static void
gst_libde265_dec_release_frame_ref (struct GstLibde265FrameRef *ref)
{
  if (ref->mapped) {
    gst_video_frame_unmap (&ref->vframe);
  }
  gst_video_codec_frame_unref (ref->frame);
  gst_buffer_replace (&ref->buffer, NULL);
  g_free (ref);
}
Example #10
static void
gst_x265_enc_dequeue_all_frames (GstX265Enc * enc)
{
  GList *l;

  for (l = enc->pending_frames; l; l = l->next) {
    FrameData *fdata = l->data;

    gst_video_frame_unmap (&fdata->vframe);
    gst_video_codec_frame_unref (fdata->frame);
    g_slice_free (FrameData, fdata);
  }
  g_list_free (enc->pending_frames);
  enc->pending_frames = NULL;
}
Example #11
static GstFlowReturn
gst_av1_enc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
{
  GstAV1Enc *av1enc = GST_AV1_ENC_CAST (encoder);
  aom_image_t raw;
  int flags = 0;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoFrame vframe;

  if (!aom_img_alloc (&raw, AOM_IMG_FMT_I420, av1enc->aom_cfg.g_w,
          av1enc->aom_cfg.g_h, 1)) {
    GST_ERROR_OBJECT (encoder, "Failed to allocate input image");
    /* returning FALSE (== GST_FLOW_OK) would silently swallow the error */
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }

  gst_video_frame_map (&vframe, &av1enc->input_state->info,
      frame->input_buffer, GST_MAP_READ);
  gst_av1_enc_fill_image (av1enc, &vframe, &raw);
  gst_video_frame_unmap (&vframe);

  if (av1enc->keyframe_dist >= 30) {
    av1enc->keyframe_dist = 0;
    flags |= AOM_EFLAG_FORCE_KF;
  }
  av1enc->keyframe_dist++;

  g_mutex_lock (&av1enc->encoder_lock);
  if (aom_codec_encode (&av1enc->encoder, &raw, frame->pts, 1, flags)
      != AOM_CODEC_OK) {
    gst_av1_codec_error (&av1enc->encoder, "Failed to encode frame");
    ret = GST_FLOW_ERROR;
  }
  g_mutex_unlock (&av1enc->encoder_lock);

  aom_img_free (&raw);
  gst_video_codec_frame_unref (frame);

  if (ret == GST_FLOW_ERROR)
    return ret;

  ret = gst_av1_enc_process (av1enc);

  if (ret == GST_FLOW_CUSTOM_SUCCESS)
    ret = GST_FLOW_OK;

  return ret;
}
Example #12
static GstFlowReturn
gst_omx_h264_enc_handle_output_frame (GstOMXVideoEnc * enc, GstOMXPort * port,
    GstOMXBuffer * buf, GstVideoCodecFrame * frame)
{
  GstOMXH264Enc *self = GST_OMX_H264_ENC (enc);

  if (buf->omx_buf->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
    /* The codec data is SPS/PPS with a startcode => bytestream stream format
     * For bytestream stream format the SPS/PPS is only in-stream and not
     * in the caps!
     */
    if (buf->omx_buf->nFilledLen >= 4 &&
        GST_READ_UINT32_BE (buf->omx_buf->pBuffer +
            buf->omx_buf->nOffset) == 0x00000001) {
      GstBuffer *hdrs;
      GstMapInfo map = GST_MAP_INFO_INIT;

      GST_DEBUG_OBJECT (self, "got codecconfig in byte-stream format");

      hdrs = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen);

      gst_buffer_map (hdrs, &map, GST_MAP_WRITE);
      memcpy (map.data,
          buf->omx_buf->pBuffer + buf->omx_buf->nOffset,
          buf->omx_buf->nFilledLen);
      gst_buffer_unmap (hdrs, &map);
      self->headers = g_list_append (self->headers, hdrs);

      if (frame)
        gst_video_codec_frame_unref (frame);

      return GST_FLOW_OK;
    }
  } else if (self->headers) {
    gst_video_encoder_set_headers (GST_VIDEO_ENCODER (self), self->headers);
    self->headers = NULL;
  }

  return
      GST_OMX_VIDEO_ENC_CLASS
      (gst_omx_h264_enc_parent_class)->handle_output_frame (enc, port, buf,
      frame);
}
Example #13
static GstVideoCodecFrame *
gst_msdkdec_get_oldest_frame (GstVideoDecoder * decoder)
{
  GstVideoCodecFrame *frame = NULL, *old_frame = NULL;
  GList *frames, *l;
  gint count = 0;

  frames = gst_video_decoder_get_frames (decoder);

  for (l = frames; l != NULL; l = l->next) {
    GstVideoCodecFrame *f = l->data;

    if (!GST_CLOCK_TIME_IS_VALID (f->pts)) {
      GST_INFO
          ("Frame doesn't have a valid PTS yet; falling back to "
          "gst_video_decoder_get_oldest_frame() without considering the PTS "
          "when selecting the frame to be finished");
      old_frame = gst_video_decoder_get_oldest_frame (decoder);
      break;
    }

    if (!frame || frame->pts > f->pts)
      frame = f;

    count++;
  }

  if (old_frame)
    frame = old_frame;

  if (frame) {
    GST_LOG_OBJECT (decoder,
        "Oldest frame is %d %" GST_TIME_FORMAT " and %d frames left",
        frame->system_frame_number, GST_TIME_ARGS (frame->pts), count - 1);
    gst_video_codec_frame_ref (frame);
  }

  if (old_frame)
    gst_video_codec_frame_unref (old_frame);

  g_list_free_full (frames, (GDestroyNotify) gst_video_codec_frame_unref);

  return frame;
}
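Note that this helper returns a new reference (the gst_video_codec_frame_ref() above), so every caller must balance it with gst_video_codec_frame_unref(), as Examples #1 and #7 do. A minimal, hypothetical caller sketch:

static void
finish_oldest_frame (GstVideoDecoder * decoder)
{
  GstVideoCodecFrame *frame = gst_msdkdec_get_oldest_frame (decoder);

  if (!frame)
    return;                     /* no pending frames */

  /* ... attach frame->output_buffer and finish or drop the frame ... */

  /* balance the ref taken inside gst_msdkdec_get_oldest_frame() */
  gst_video_codec_frame_unref (frame);
}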
Example #14
static void
gst_x265_enc_dequeue_frame (GstX265Enc * enc, GstVideoCodecFrame * frame)
{
  GList *l;

  for (l = enc->pending_frames; l; l = l->next) {
    FrameData *fdata = l->data;

    if (fdata->frame != frame)
      continue;

    gst_video_frame_unmap (&fdata->vframe);
    gst_video_codec_frame_unref (fdata->frame);
    g_slice_free (FrameData, fdata);

    enc->pending_frames = g_list_delete_link (enc->pending_frames, l);
    return;
  }
}
Example #15
static GstFlowReturn
gst_rsvg_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstRsvgDec *rsvg = GST_RSVG_DEC (decoder);
  GstFlowReturn ret;

  ret = gst_rsvg_decode_image (rsvg, frame->input_buffer, frame);
  switch (ret) {
    case GST_FLOW_OK:
      ret = gst_video_decoder_finish_frame (decoder, frame);
      break;
    default:
      gst_video_codec_frame_unref (frame);
      break;
  }

  GST_LOG_OBJECT (rsvg, "Handle frame done");
  return ret;
}
Example #16
static GstFlowReturn
gst_video_decoder_tester_handle_frame (GstVideoDecoder * dec,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoderTester *dectester = (GstVideoDecoderTester *) dec;
  guint64 input_num;
  guint8 *data;
  gint size;
  GstMapInfo map;

  gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ);

  input_num = *((guint64 *) map.data);

  if ((input_num == dectester->last_buf_num + 1
          && dectester->last_buf_num != -1)
      || !GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
          GST_BUFFER_FLAG_DELTA_UNIT)) {

    /* the output is gray8 */
    size = TEST_VIDEO_WIDTH * TEST_VIDEO_HEIGHT;
    data = g_malloc0 (size);

    memcpy (data, map.data, sizeof (guint64));

    frame->output_buffer = gst_buffer_new_wrapped (data, size);
    frame->pts = GST_BUFFER_PTS (frame->input_buffer);
    frame->duration = GST_BUFFER_DURATION (frame->input_buffer);
    dectester->last_buf_num = input_num;
    if (!GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
            GST_BUFFER_FLAG_DELTA_UNIT))
      dectester->last_kf_num = input_num;
  }

  gst_buffer_unmap (frame->input_buffer, &map);

  if (frame->output_buffer)
    return gst_video_decoder_finish_frame (dec, frame);
  gst_video_codec_frame_unref (frame);
  return GST_FLOW_OK;
}
Example #17
static GstFlowReturn
theora_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
{
  GstTheoraDec *dec;
  GstFlowReturn res;

  dec = GST_THEORA_DEC (bdec);

  res = theora_dec_decode_buffer (dec, frame->input_buffer, frame);
  switch (res) {
    case GST_FLOW_OK:
      res = gst_video_decoder_finish_frame (bdec, frame);
      break;
    case GST_CUSTOM_FLOW_DROP:
      res = gst_video_decoder_drop_frame (bdec, frame);
      break;
    default:
      gst_video_codec_frame_unref (frame);
      break;
  }

  return res;
}
Example #18
static GstFlowReturn
_gst_libde265_return_image (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame, const struct de265_image *img)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  struct GstLibde265FrameRef *ref;
  GstFlowReturn result;
  GstVideoFrame outframe;
  GstVideoCodecFrame *out_frame;
  int frame_number;
  int plane;

  ref = (struct GstLibde265FrameRef *) de265_get_image_plane_user_data (img, 0);
  if (ref != NULL) {
    /* decoder is using direct rendering */
    out_frame = gst_video_codec_frame_ref (ref->frame);
    if (frame != NULL) {
      gst_video_codec_frame_unref (frame);
    }
    gst_buffer_replace (&out_frame->output_buffer, ref->buffer);
    gst_buffer_replace (&ref->buffer, NULL);
    return gst_video_decoder_finish_frame (decoder, out_frame);
  }

  result =
      _gst_libde265_image_available (decoder, de265_get_image_width (img, 0),
      de265_get_image_height (img, 0));
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    return result;
  }

  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (frame_number != -1) {
    out_frame = gst_video_decoder_get_frame (decoder, frame_number);
  } else {
    out_frame = NULL;
  }
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }

  if (out_frame == NULL) {
    GST_ERROR_OBJECT (dec, "No frame available to return");
    return GST_FLOW_ERROR;
  }

  result = gst_video_decoder_allocate_output_frame (decoder, out_frame);
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output frame");
    return result;
  }

  g_assert (dec->output_state != NULL);
  if (!gst_video_frame_map (&outframe, &dec->output_state->info,
          out_frame->output_buffer, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map output buffer");
    return GST_FLOW_ERROR;
  }

  for (plane = 0; plane < 3; plane++) {
    int width = de265_get_image_width (img, plane);
    int height = de265_get_image_height (img, plane);
    int srcstride = width;
    int dststride = GST_VIDEO_FRAME_COMP_STRIDE (&outframe, plane);
    const uint8_t *src = de265_get_image_plane (img, plane, &srcstride);
    uint8_t *dest = GST_VIDEO_FRAME_COMP_DATA (&outframe, plane);
    if (srcstride == width && dststride == width) {
      memcpy (dest, src, height * width);
    } else {
      while (height--) {
        memcpy (dest, src, width);
        src += srcstride;
        dest += dststride;
      }
    }
  }
  gst_video_frame_unmap (&outframe);
  return gst_video_decoder_finish_frame (decoder, out_frame);
}
Example #19
static GstFlowReturn
gst_x265_enc_encode_frame (GstX265Enc * encoder, x265_picture * pic_in,
    GstVideoCodecFrame * input_frame, guint32 * i_nal, gboolean send)
{
  GstVideoCodecFrame *frame = NULL;
  GstBuffer *out_buf = NULL;
  x265_picture pic_out;
  x265_nal *nal;
  int i_size, i, offset;
  int encoder_return;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean update_latency = FALSE;

  if (G_UNLIKELY (encoder->x265enc == NULL)) {
    if (input_frame)
      gst_video_codec_frame_unref (input_frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  GST_OBJECT_LOCK (encoder);
  if (encoder->reconfig) {
    // x265_encoder_reconfig is not yet implemented thus we shut down and re-create encoder
    gst_x265_enc_init_encoder (encoder);
    update_latency = TRUE;
  }

  if (pic_in && input_frame) {
    if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (input_frame)) {
      GST_INFO_OBJECT (encoder, "Forcing key frame");
      pic_in->sliceType = X265_TYPE_IDR;
    }
  }
  GST_OBJECT_UNLOCK (encoder);

  if (G_UNLIKELY (update_latency))
    gst_x265_enc_set_latency (encoder);

  encoder_return = x265_encoder_encode (encoder->x265enc,
      &nal, i_nal, pic_in, &pic_out);

  GST_DEBUG_OBJECT (encoder, "encoder result (%d) with %u nal units",
      encoder_return, *i_nal);

  if (encoder_return < 0) {
    GST_ELEMENT_ERROR (encoder, STREAM, ENCODE, ("Encode x265 frame failed."),
        ("x265_encoder_encode return code=%d", encoder_return));
    ret = GST_FLOW_ERROR;
    /* Make sure we finish this frame */
    frame = input_frame;
    goto out;
  }

  /* Input frame is now queued */
  if (input_frame)
    gst_video_codec_frame_unref (input_frame);

  if (!*i_nal) {
    ret = GST_FLOW_OK;
    GST_LOG_OBJECT (encoder, "no output yet");
    goto out;
  }

  frame = gst_video_encoder_get_frame (GST_VIDEO_ENCODER (encoder),
      GPOINTER_TO_INT (pic_out.userData));
  g_assert (frame || !send);

  GST_DEBUG_OBJECT (encoder,
      "output picture ready POC=%d system=%d frame found %d", pic_out.poc,
      GPOINTER_TO_INT (pic_out.userData), frame != NULL);

  if (!send || !frame) {
    GST_LOG_OBJECT (encoder, "not sending (%d) or frame not found (%d)", send,
        frame != NULL);
    ret = GST_FLOW_OK;
    goto out;
  }

  i_size = 0;
  offset = 0;
  for (i = 0; i < *i_nal; i++)
    i_size += nal[i].sizeBytes;
  out_buf = gst_buffer_new_allocate (NULL, i_size, NULL);
  for (i = 0; i < *i_nal; i++) {
    gst_buffer_fill (out_buf, offset, nal[i].payload, nal[i].sizeBytes);
    offset += nal[i].sizeBytes;
  }

  frame->output_buffer = out_buf;

  if (encoder->push_header) {
    GstBuffer *header;

    header = gst_x265_enc_get_header_buffer (encoder);
    frame->output_buffer = gst_buffer_append (header, frame->output_buffer);
    encoder->push_header = FALSE;
  }

  GST_LOG_OBJECT (encoder,
      "output: dts %" G_GINT64_FORMAT " pts %" G_GINT64_FORMAT,
      (gint64) pic_out.dts, (gint64) pic_out.pts);

  frame->dts = pic_out.dts + encoder->dts_offset;

out:
  if (frame) {
    gst_x265_enc_dequeue_frame (encoder, frame);
    ret = gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (encoder), frame);
  }

  return ret;
}
Example #20
static GstFlowReturn
gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2output))) {
    if (!self->input_state)
      goto not_negotiated;
    if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps))
      goto not_negotiated;
  }

  if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
    GstVideoInfo info;
    GstVideoCodecState *output_state;
    GstBuffer *codec_data;

    GST_DEBUG_OBJECT (self, "Sending header");

    codec_data = self->input_state->codec_data;

    /* We are running in byte-stream mode, so we don't know the headers, but
     * we need to send something, otherwise the decoder will refuse to
     * initialize.
     */
    if (codec_data) {
      gst_buffer_ref (codec_data);
    } else {
      codec_data = frame->input_buffer;
      frame->input_buffer = NULL;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, 2, 2);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
            v4l2output->pool), &codec_data);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    gst_buffer_unref (codec_data);

    if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info))
      goto not_negotiated;

    output_state = gst_video_decoder_set_output_state (decoder,
        info.finfo->format, info.width, info.height, self->input_state);

    /* Copy the rest of the information, there might be more in the future */
    output_state->info.interlace_mode = info.interlace_mode;
    gst_video_codec_state_unref (output_state);

    if (!gst_video_decoder_negotiate (decoder)) {
      if (GST_PAD_IS_FLUSHING (decoder->srcpad))
        goto flushing;
      else
        goto not_negotiated;
    }

    /* Ensure our internal pool is activated */
    if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
            TRUE))
      goto activate_failed;
  }

  if (g_atomic_int_get (&self->processing) == FALSE) {
    /* It's possible that the processing thread stopped due to an error */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
      ret = self->output_flow;
      goto drop;
    }

    GST_DEBUG_OBJECT (self, "Starting decoding thread");

    /* Start the processing task, when it quits, the task will disable input
     * processing to unlock input if draining, or prevent potential block */
    g_atomic_int_set (&self->processing, TRUE);
    if (!gst_pad_start_task (decoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_dec_loop, self,
            (GDestroyNotify) gst_v4l2_video_dec_loop_stopped))
      goto start_task_failed;
  }

  if (frame->input_buffer) {
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
            pool), &frame->input_buffer);
    GST_VIDEO_DECODER_STREAM_LOCK (decoder);

    if (ret == GST_FLOW_FLUSHING) {
      if (g_atomic_int_get (&self->processing) == FALSE)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }

    /* No need to keep input around */
    gst_buffer_replace (&frame->input_buffer, NULL);
  }

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_ERROR_OBJECT (self, "not negotiated");
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto drop;
  }
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }

start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start decoding thread.")), (NULL));
    g_atomic_int_set (&self->processing, FALSE);
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
        ("Maybe be due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_decoder_drop_frame (decoder, frame);
    return ret;
  }
}
Example #21
static int
gst_libde265_dec_get_buffer (de265_decoder_context * ctx,
    struct de265_image_spec *spec, struct de265_image *img, void *userdata)
{
  GstVideoDecoder *base = (GstVideoDecoder *) userdata;
  GstLibde265Dec *dec = GST_LIBDE265_DEC (base);
  GstVideoCodecFrame *frame = NULL;
  int i;
  int width = spec->width;
  int height = spec->height;
  GstFlowReturn ret;
  struct GstLibde265FrameRef *ref;
  GstVideoInfo *info;
  int frame_number;

  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (G_UNLIKELY (frame_number == -1)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Frame has no number assigned!");
    goto fallback;
  }

  frame = gst_video_decoder_get_frame (base, frame_number);
  if (G_UNLIKELY (frame == NULL)) {
    /* should not happen... */
    GST_WARNING_OBJECT (base, "Couldn't get codec frame!");
    goto fallback;
  }

  if (width % spec->alignment) {
    width += spec->alignment - (width % spec->alignment);
  }
  if (width != spec->visible_width || height != spec->visible_height) {
    /* clipping not supported for now */
    goto fallback;
  }

  ret = _gst_libde265_image_available (base, width, height);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    goto fallback;
  }

  ret =
      gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (dec), frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output buffer");
    goto fallback;
  }

  ref = (struct GstLibde265FrameRef *) g_malloc0 (sizeof (*ref));
  g_assert (ref != NULL);
  ref->decoder = base;
  ref->frame = frame;

  gst_buffer_replace (&ref->buffer, frame->output_buffer);
  gst_buffer_replace (&frame->output_buffer, NULL);

  info = &dec->output_state->info;
  if (!gst_video_frame_map (&ref->vframe, info, ref->buffer, GST_MAP_READWRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map frame output buffer");
    goto error;
  }

  ref->mapped = TRUE;
  if (GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe,
          0) < width * GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0)) {
    GST_DEBUG_OBJECT (dec, "plane 0: pitch too small (%d/%d*%d)",
        GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, 0), width,
        GST_VIDEO_FRAME_COMP_PSTRIDE (&ref->vframe, 0));
    goto error;
  }

  if (GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0) < height) {
    GST_DEBUG_OBJECT (dec, "plane 0: lines too few (%d/%d)",
        GST_VIDEO_FRAME_COMP_HEIGHT (&ref->vframe, 0), height);
    goto error;
  }

  for (i = 0; i < 3; i++) {
    uint8_t *data;
    int stride = GST_VIDEO_FRAME_PLANE_STRIDE (&ref->vframe, i);
    if (stride % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d: pitch not aligned (%d%%%d)",
          i, stride, spec->alignment);
      goto error;
    }

    data = GST_VIDEO_FRAME_PLANE_DATA (&ref->vframe, i);
    if ((uintptr_t) (data) % spec->alignment) {
      GST_DEBUG_OBJECT (dec, "plane %d not aligned", i);
      goto error;
    }

    de265_set_image_plane (img, i, data, stride, ref);
  }
  return 1;

error:
  gst_libde265_dec_release_frame_ref (ref);
  frame = NULL;

fallback:
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }
  return de265_get_default_image_allocation_functions ()->get_buffer (ctx,
      spec, img, userdata);
}
Example #22
static GstFlowReturn
gst_openh264enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenh264Enc *openh264enc = GST_OPENH264ENC (encoder);
  SSourcePicture *src_pic = NULL;
  GstVideoFrame video_frame;
  gboolean force_keyframe;
  gint ret;
  SFrameBSInfo frame_info;
  gfloat fps;
  GstMapInfo map;
  gint i, j;
  gsize buf_length = 0;

  if (frame) {
    src_pic = new SSourcePicture;

    if (src_pic == NULL) {
      if (frame)
        gst_video_codec_frame_unref (frame);
      return GST_FLOW_ERROR;
    }
    //fill default src_pic
    src_pic->iColorFormat = videoFormatI420;
    src_pic->uiTimeStamp = frame->pts / GST_MSECOND;
  }

  openh264enc->frame_count++;
  if (frame) {
    if (G_UNLIKELY (openh264enc->frame_count == 1)) {
      openh264enc->time_per_frame = (GST_SECOND / openh264enc->framerate);
      openh264enc->previous_timestamp = frame->pts;
    } else {
      openh264enc->time_per_frame =
          openh264enc->time_per_frame * 0.8 + (frame->pts -
          openh264enc->previous_timestamp) * 0.2;
      openh264enc->previous_timestamp = frame->pts;
      if (openh264enc->frame_count % 10 == 0) {
        fps = GST_SECOND / (gdouble) openh264enc->time_per_frame;
        openh264enc->encoder->SetOption (ENCODER_OPTION_FRAME_RATE, &fps);
      }
    }
  }

  if (frame) {
    gst_video_frame_map (&video_frame, &openh264enc->input_state->info,
        frame->input_buffer, GST_MAP_READ);
    src_pic->iPicWidth = GST_VIDEO_FRAME_WIDTH (&video_frame);
    src_pic->iPicHeight = GST_VIDEO_FRAME_HEIGHT (&video_frame);
    src_pic->iStride[0] = GST_VIDEO_FRAME_COMP_STRIDE (&video_frame, 0);
    src_pic->iStride[1] = GST_VIDEO_FRAME_COMP_STRIDE (&video_frame, 1);
    src_pic->iStride[2] = GST_VIDEO_FRAME_COMP_STRIDE (&video_frame, 2);
    src_pic->pData[0] = GST_VIDEO_FRAME_COMP_DATA (&video_frame, 0);
    src_pic->pData[1] = GST_VIDEO_FRAME_COMP_DATA (&video_frame, 1);
    src_pic->pData[2] = GST_VIDEO_FRAME_COMP_DATA (&video_frame, 2);

    force_keyframe = GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame);
    if (force_keyframe) {
      openh264enc->encoder->ForceIntraFrame (true);
      GST_DEBUG_OBJECT (openh264enc,
          "Got force key unit event, next frame coded as intra picture");
    }
  }

  memset (&frame_info, 0, sizeof (SFrameBSInfo));
  ret = openh264enc->encoder->EncodeFrame (src_pic, &frame_info);
  if (ret != cmResultSuccess) {
    if (frame) {
      gst_video_frame_unmap (&video_frame);
      gst_video_codec_frame_unref (frame);
      delete src_pic;
      GST_ELEMENT_ERROR (openh264enc, STREAM, ENCODE,
          ("Could not encode frame"), ("Openh264 returned %d", ret));
      return GST_FLOW_ERROR;
    } else {
      return GST_FLOW_EOS;
    }
  }

  if (videoFrameTypeSkip == frame_info.eFrameType) {
    if (frame) {
      gst_video_frame_unmap (&video_frame);
      gst_video_encoder_finish_frame (encoder, frame);
      delete src_pic;
    }

    return GST_FLOW_OK;
  }

  if (frame) {
    gst_video_frame_unmap (&video_frame);
    gst_video_codec_frame_unref (frame);
    delete src_pic;
    src_pic = NULL;
    frame = NULL;
  }

  /* FIXME: openh264 has no way for us to get a connection
   * between the input and output frames, we just have to
   * guess based on the input */
  frame = gst_video_encoder_get_oldest_frame (encoder);
  if (!frame) {
    GST_ELEMENT_ERROR (openh264enc, STREAM, ENCODE,
        ("Could not encode frame"), ("openh264enc returned %d", ret));
    /* frame is NULL here, so there is no reference to drop */
    return GST_FLOW_ERROR;
  }

  if (videoFrameTypeIDR == frame_info.eFrameType) {
    GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  } else {
    GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
  }

  for (i = 0; i < frame_info.iLayerNum; i++) {
    for (j = 0; j < frame_info.sLayerInfo[i].iNalCount; j++) {
      buf_length += frame_info.sLayerInfo[i].pNalLengthInByte[j];
    }
  }

  frame->output_buffer =
      gst_video_encoder_allocate_output_buffer (encoder, buf_length);
  gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE);

  buf_length = 0;
  for (i = 0; i < frame_info.iLayerNum; i++) {
    gsize layer_size = 0;
    for (j = 0; j < frame_info.sLayerInfo[i].iNalCount; j++) {
      layer_size += frame_info.sLayerInfo[i].pNalLengthInByte[j];
    }
    memcpy (map.data + buf_length, frame_info.sLayerInfo[i].pBsBuf, layer_size);
    buf_length += layer_size;
  }

  gst_buffer_unmap (frame->output_buffer, &map);

  GST_LOG_OBJECT (openh264enc, "openh264 picture %scoded OK!",
      (ret != cmResultSuccess) ? "NOT " : "");

  return gst_video_encoder_finish_frame (encoder, frame);
}
Example #23
static GstFlowReturn
gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
#ifdef HAVE_OPENJPEG_1
  opj_cinfo_t *enc;
  GstMapInfo map;
  guint length;
  opj_cio_t *io;
#else
  opj_codec_t *enc;
  opj_stream_t *stream;
  MemStream mstream;
#endif
  opj_image_t *image;
  GstVideoFrame vframe;

  GST_DEBUG_OBJECT (self, "Handling frame");

  enc = opj_create_compress (self->codec_format);
  if (!enc)
    goto initialization_error;

#ifdef HAVE_OPENJPEG_1
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_event_mgr_t callbacks;

    callbacks.error_handler = gst_openjpeg_enc_opj_error;
    callbacks.warning_handler = gst_openjpeg_enc_opj_warning;
    callbacks.info_handler = gst_openjpeg_enc_opj_info;
    opj_set_event_mgr ((opj_common_ptr) enc, &callbacks, self);
  } else {
    opj_set_event_mgr ((opj_common_ptr) enc, NULL, NULL);
  }
#else
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (enc, gst_openjpeg_enc_opj_info, self);
    opj_set_warning_handler (enc, gst_openjpeg_enc_opj_warning, self);
    opj_set_error_handler (enc, gst_openjpeg_enc_opj_error, self);
  } else {
    opj_set_info_handler (enc, NULL, NULL);
    opj_set_warning_handler (enc, NULL, NULL);
    opj_set_error_handler (enc, NULL, NULL);
  }
#endif

  if (!gst_video_frame_map (&vframe, &self->input_state->info,
          frame->input_buffer, GST_MAP_READ))
    goto map_read_error;

  image = gst_openjpeg_enc_fill_image (self, &vframe);
  if (!image)
    goto fill_image_error;
  gst_video_frame_unmap (&vframe);

  opj_setup_encoder (enc, &self->params, image);

#ifdef HAVE_OPENJPEG_1
  io = opj_cio_open ((opj_common_ptr) enc, NULL, 0);
  if (!io)
    goto open_error;

  if (!opj_encode (enc, io, image, NULL))
    goto encode_error;

  opj_image_destroy (image);

  length = cio_tell (io);

  ret =
      gst_video_encoder_allocate_output_frame (encoder, frame,
      length + (self->is_jp2c ? 8 : 0));
  if (ret != GST_FLOW_OK)
    goto allocate_error;

  gst_buffer_fill (frame->output_buffer, self->is_jp2c ? 8 : 0, io->buffer,
      length);
  if (self->is_jp2c) {
    gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, length + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_buffer_unmap (frame->output_buffer, &map);
  }

  opj_cio_close (io);
  opj_destroy_compress (enc);
#else
  stream = opj_stream_create (4096, OPJ_FALSE);
  if (!stream)
    goto open_error;

  mstream.allocsize = 4096;
  mstream.data = g_malloc (mstream.allocsize);
  mstream.offset = 0;
  mstream.size = 0;

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream);
  opj_stream_set_user_data_length (stream, mstream.size);

  if (!opj_start_compress (enc, image, stream))
    goto encode_error;

  if (!opj_encode (enc, stream))
    goto encode_error;

  if (!opj_end_compress (enc, stream))
    goto encode_error;

  opj_image_destroy (image);
  opj_stream_destroy (stream);
  opj_destroy_codec (enc);

  frame->output_buffer = gst_buffer_new ();

  if (self->is_jp2c) {
    GstMapInfo map;
    GstMemory *mem;

    mem = gst_allocator_alloc (NULL, 8, NULL);
    gst_memory_map (mem, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, mstream.size + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_memory_unmap (mem, &map);
    gst_buffer_append_memory (frame->output_buffer, mem);
  }

  gst_buffer_append_memory (frame->output_buffer,
      gst_memory_new_wrapped (0, mstream.data, mstream.allocsize, 0,
          mstream.size, NULL, (GDestroyNotify) g_free));
#endif

  ret = gst_video_encoder_finish_frame (encoder, frame);

  return ret;

initialization_error:
  {
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize OpenJPEG encoder"), (NULL));
    return GST_FLOW_ERROR;
  }
map_read_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map input buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
fill_image_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_frame_unmap (&vframe);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to fill OpenJPEG image"), (NULL));
    return GST_FLOW_ERROR;
  }
open_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to open OpenJPEG data"), (NULL));
    return GST_FLOW_ERROR;
  }
encode_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_image_destroy (image);
    opj_destroy_compress (enc);
#else
    opj_stream_destroy (stream);
    g_free (mstream.data);
    opj_image_destroy (image);
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, STREAM, ENCODE,
        ("Failed to encode OpenJPEG stream"), (NULL));
    return GST_FLOW_ERROR;
  }
#ifdef HAVE_OPENJPEG_1
allocate_error:
  {
    opj_cio_close (io);
    opj_destroy_compress (enc);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to allocate output buffer"), (NULL));
    return ret;
  }
#endif
}
Example #24
static GstFlowReturn
gst_amc_video_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstAmcVideoEnc *self;
  gint idx;
  GstAmcBuffer *buf;
  GstAmcBufferInfo buffer_info;
  GstClockTime timestamp, duration, timestamp_offset = 0;
  BufferIdentification *id;
  GError *err = NULL;

  self = GST_AMC_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Handling frame");

  if (!self->started) {
    GST_ERROR_OBJECT (self, "Codec not started yet");
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  if (self->flushing)
    goto flushing;

  if (self->downstream_flow_ret != GST_FLOW_OK)
    goto downstream_error;

  timestamp = frame->pts;
  duration = frame->duration;

again:
  /* Make sure to release the base class stream lock, otherwise
   * _loop() can't call _finish_frame() and we might block forever
   * because no input buffers are released */
  GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
  /* Wait at most 100ms here, some codecs don't fail dequeueing if
   * the codec is flushing, causing deadlocks during shutdown */
  idx = gst_amc_codec_dequeue_input_buffer (self->codec, 100000, &err);
  GST_VIDEO_ENCODER_STREAM_LOCK (self);

  if (idx < 0) {
    if (self->flushing || self->downstream_flow_ret == GST_FLOW_FLUSHING) {
      g_clear_error (&err);
      goto flushing;
    }

    switch (idx) {
      case INFO_TRY_AGAIN_LATER:
        GST_DEBUG_OBJECT (self, "Dequeueing input buffer timed out");
        goto again;             /* next try */
        break;
      case G_MININT:
        GST_ERROR_OBJECT (self, "Failed to dequeue input buffer");
        goto dequeue_error;
      default:
        g_assert_not_reached ();
        break;
    }

    goto again;
  }

  if (self->flushing) {
    memset (&buffer_info, 0, sizeof (buffer_info));
    gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, NULL);
    goto flushing;
  }

  if (self->downstream_flow_ret != GST_FLOW_OK) {
    memset (&buffer_info, 0, sizeof (buffer_info));
    gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    goto downstream_error;
  }

  /* Now handle the frame */

  /* Copy the buffer content in chunks of size as requested
   * by the port */
  buf = gst_amc_codec_get_input_buffer (self->codec, idx, &err);
  if (!buf)
    goto failed_to_get_input_buffer;

  memset (&buffer_info, 0, sizeof (buffer_info));
  buffer_info.offset = 0;
  buffer_info.size = MIN (self->color_format_info.frame_size, buf->size);
  gst_amc_buffer_set_position_and_limit (buf, NULL, buffer_info.offset,
      buffer_info.size);

  if (!gst_amc_video_enc_fill_buffer (self, frame->input_buffer, buf,
          &buffer_info)) {
    memset (&buffer_info, 0, sizeof (buffer_info));
    gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err);
    if (err && !self->flushing)
      GST_ELEMENT_WARNING_FROM_ERROR (self, err);
    g_clear_error (&err);
    gst_amc_buffer_free (buf);
    buf = NULL;
    goto buffer_fill_error;
  }

  gst_amc_buffer_free (buf);
  buf = NULL;

  if (timestamp != GST_CLOCK_TIME_NONE) {
    buffer_info.presentation_time_us =
        gst_util_uint64_scale (timestamp + timestamp_offset, 1, GST_USECOND);
    self->last_upstream_ts = timestamp + timestamp_offset;
  }
  if (duration != GST_CLOCK_TIME_NONE)
    self->last_upstream_ts += duration;

  id = buffer_identification_new (timestamp + timestamp_offset);
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame))
    buffer_info.flags |= BUFFER_FLAG_SYNC_FRAME;
  gst_video_codec_frame_set_user_data (frame, id,
      (GDestroyNotify) buffer_identification_free);

  GST_DEBUG_OBJECT (self,
      "Queueing buffer %d: size %d time %" G_GINT64_FORMAT " flags 0x%08x",
      idx, buffer_info.size, buffer_info.presentation_time_us,
      buffer_info.flags);
  if (!gst_amc_codec_queue_input_buffer (self->codec, idx, &buffer_info, &err)) {
    if (self->flushing) {
      g_clear_error (&err);
      goto flushing;
    }
    goto queue_error;
  }

  self->drained = FALSE;

  gst_video_codec_frame_unref (frame);

  return self->downstream_flow_ret;

downstream_error:
  {
    GST_ERROR_OBJECT (self, "Downstream returned %s",
        gst_flow_get_name (self->downstream_flow_ret));

    gst_video_codec_frame_unref (frame);
    return self->downstream_flow_ret;
  }
failed_to_get_input_buffer:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
buffer_fill_error:
  {
    /* buf was already freed and set to NULL above, so it must not be
     * dereferenced for the error message */
    GST_ELEMENT_ERROR (self, RESOURCE, WRITE, (NULL),
        ("Failed to write %d bytes of input into the amc buffer",
            self->color_format_info.frame_size));
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
dequeue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
queue_error:
  {
    GST_ELEMENT_ERROR_FROM_ERROR (self, err);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }
flushing:
  {
    GST_DEBUG_OBJECT (self, "Flushing -- returning FLUSHING");
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_FLUSHING;
  }
}
Example #25
static GstFlowReturn
gst_mfc_dec_dequeue_output (GstMFCDec * self)
{
  GstFlowReturn ret = GST_FLOW_OK;
  gint mfc_ret;
  GstVideoCodecFrame *frame = NULL;
  GstBuffer *outbuf = NULL;
  struct mfc_buffer *mfc_outbuf = NULL;
  gint width, height;
  gint crop_left, crop_top, crop_width, crop_height;
  gint src_ystride, src_uvstride;
  GstVideoCodecState *state = NULL;
  gint64 deadline;
  struct timeval timestamp;

  if (!self->initialized) {
    GST_DEBUG_OBJECT (self, "Initializing decoder");
    if ((mfc_ret = mfc_dec_init_output (self->context, 1)) < 0)
      goto initialize_error;
    self->initialized = TRUE;
  }

  while ((mfc_ret = mfc_dec_output_available (self->context)) > 0) {
    GST_DEBUG_OBJECT (self, "Dequeueing output");

    mfc_dec_get_output_size (self->context, &width, &height);
    mfc_dec_get_output_stride (self->context, &src_ystride, &src_uvstride);
    mfc_dec_get_crop_size (self->context, &crop_left, &crop_top, &crop_width,
        &crop_height);

    GST_DEBUG_OBJECT (self, "Have output: width %d, height %d, "
        "Y stride %d, UV stride %d, "
        "crop_left %d, crop_top %d, "
        "crop_width %d, crop_height %d", width, height, src_ystride,
        src_uvstride, crop_left, crop_top, crop_width, crop_height);

    state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));

    if (!state || self->width != width || self->height != height ||
        self->src_stride[0] != src_ystride
        || self->src_stride[1] != src_uvstride
        || self->crop_left != crop_left || self->crop_top != crop_top
        || self->crop_width != crop_width || self->crop_height != crop_height) {
      self->width = width;
      self->height = height;
      self->crop_left = crop_left;
      self->crop_top = crop_top;
      self->crop_width = crop_width;
      self->crop_height = crop_height;
      self->src_stride[0] = src_ystride;
      self->src_stride[1] = src_uvstride;
      self->src_stride[2] = 0;

      if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self)))
        goto negotiate_error;

      if (state)
        gst_video_codec_state_unref (state);
      state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
    }

    if ((mfc_ret =
            mfc_dec_dequeue_output (self->context, &mfc_outbuf,
                &timestamp)) < 0) {
      if (mfc_ret == -2) {
        GST_DEBUG_OBJECT (self, "Timeout dequeueing output, trying again");
        mfc_ret =
            mfc_dec_dequeue_output (self->context, &mfc_outbuf, &timestamp);
      }

      if (mfc_ret < 0)
        goto dequeue_error;
    }

    g_assert (mfc_outbuf != NULL);

    GST_DEBUG_OBJECT (self, "Got output buffer with ID %ld", timestamp.tv_sec);

    frame = NULL;
    if (timestamp.tv_sec != -1)
      frame =
          gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
          timestamp.tv_sec);

    if (frame) {
      deadline =
          gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (self),
          frame);
      if (deadline < 0) {
        GST_LOG_OBJECT (self,
            "Dropping too late frame: deadline %" G_GINT64_FORMAT, deadline);
        ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
        frame = NULL;
        outbuf = NULL;
        goto done;
      }

      ret =
          gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER (self),
          frame);

      if (ret != GST_FLOW_OK)
        goto alloc_error;

      outbuf = frame->output_buffer;
    } else {
      GST_WARNING_OBJECT (self, "Didn't find a frame for ID %ld",
          timestamp.tv_sec);

      outbuf =
          gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));

      if (!outbuf) {
        ret = GST_FLOW_ERROR;
        goto alloc_error;
      }
    }

    ret = gst_mfc_dec_fill_outbuf (self, outbuf, mfc_outbuf, state);
    if (ret != GST_FLOW_OK)
      goto fill_error;

    if (frame) {
      ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
      frame = NULL;
      outbuf = NULL;
    } else {
      ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
      outbuf = NULL;
    }

    if (ret != GST_FLOW_OK)
      GST_INFO_OBJECT (self, "Pushing frame returned: %s",
          gst_flow_get_name (ret));

  done:
    if (mfc_outbuf) {
      if ((mfc_ret = mfc_dec_enqueue_output (self->context, mfc_outbuf)) < 0)
        goto enqueue_error;
    }

    if (!frame && outbuf)
      gst_buffer_unref (outbuf);
    if (frame)
      gst_video_codec_frame_unref (frame);
    if (state)
      gst_video_codec_state_unref (state);

    frame = NULL;
    outbuf = NULL;

    if (ret != GST_FLOW_OK)
      break;
  }

  return ret;

initialize_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, INIT, ("Failed to initialize output"),
        ("mfc_dec_init: %d", mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }

negotiate_error:
  {
    GST_ELEMENT_ERROR (self, CORE, NEGOTIATION, ("Failed to negotiate"),
        (NULL));
    ret = GST_FLOW_NOT_NEGOTIATED;
    goto done;
  }

dequeue_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to dequeue output buffer"), ("mfc_dec_dequeue_output: %d",
            mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }

alloc_error:
  {
    GST_ELEMENT_ERROR (self, CORE, FAILED, ("Failed to allocate output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

fill_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED, ("Failed to fill output buffer"),
        (NULL));
    ret = GST_FLOW_ERROR;
    goto done;
  }

enqueue_error:
  {
    GST_ELEMENT_ERROR (self, LIBRARY, FAILED,
        ("Failed to enqueue output buffer"), ("mfc_dec_enqueue_output: %d",
            mfc_ret));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
Example #26
static GstFlowReturn
gst_amc_video_enc_handle_output_frame (GstAmcVideoEnc * self,
    GstAmcBuffer * buf, const GstAmcBufferInfo * buffer_info,
    GstVideoCodecFrame * frame)
{
  GstFlowReturn flow_ret = GST_FLOW_OK;
  GstVideoEncoder *encoder = GST_VIDEO_ENCODER_CAST (self);

  /* The BUFFER_FLAG_CODEC_CONFIG logic is borrowed from
   * gst-omx. see *_handle_output_frame in
   * gstomxvideoenc.c and gstomxh264enc.c */
  if ((buffer_info->flags & BUFFER_FLAG_CODEC_CONFIG)
      && buffer_info->size > 0) {
    GstStructure *s;
    GstVideoCodecState *state;

    state = gst_video_encoder_get_output_state (encoder);
    s = gst_caps_get_structure (state->caps, 0);
    if (!strcmp (gst_structure_get_name (s), "video/x-h264")) {
      gst_video_codec_state_unref (state);

      if (buffer_info->size > 4 &&
          GST_READ_UINT32_BE (buf->data + buffer_info->offset) == 0x00000001) {
        GList *l = NULL;
        GstBuffer *hdrs;

        GST_DEBUG_OBJECT (self, "got codecconfig in byte-stream format");

        hdrs = gst_buffer_new_and_alloc (buffer_info->size);
        gst_buffer_fill (hdrs, 0, buf->data + buffer_info->offset,
            buffer_info->size);

        l = g_list_append (l, hdrs);
        gst_video_encoder_set_headers (encoder, l);
      }
    } else {
      GstBuffer *codec_data;

      GST_DEBUG_OBJECT (self, "Handling codec data");

      codec_data = gst_buffer_new_and_alloc (buffer_info->size);
      gst_buffer_fill (codec_data, 0, buf->data + buffer_info->offset,
          buffer_info->size);
      state->codec_data = codec_data;
      gst_video_codec_state_unref (state);

      if (!gst_video_encoder_negotiate (encoder)) {
        gst_video_codec_frame_unref (frame);
        return GST_FLOW_NOT_NEGOTIATED;
      }

      return GST_FLOW_OK;
    }
  }

  if (buffer_info->size > 0) {
    GstBuffer *out_buf;
    GstPad *srcpad;

    srcpad = GST_VIDEO_ENCODER_SRC_PAD (encoder);
    out_buf =
        gst_video_encoder_allocate_output_buffer (encoder, buffer_info->size);
    gst_buffer_fill (out_buf, 0, buf->data + buffer_info->offset,
        buffer_info->size);

    GST_BUFFER_PTS (out_buf) =
        gst_util_uint64_scale (buffer_info->presentation_time_us, GST_USECOND,
        1);

    if (frame) {
      frame->output_buffer = out_buf;
      flow_ret = gst_video_encoder_finish_frame (encoder, frame);
    } else {
      /* This sometimes happens at EOS or if the input is not properly
       * framed; handle it gracefully by pushing the filled buffer
       * downstream without an associated codec frame.
       */

      GST_ERROR_OBJECT (self, "No corresponding frame found");
      flow_ret = gst_pad_push (srcpad, out_buf);
    }
  } else if (frame) {
    flow_ret = gst_video_encoder_finish_frame (encoder, frame);
  }

  return flow_ret;
}
Example #27
static GstFlowReturn
gst_vaapidecode_push_decoded_frame (GstVideoDecoder * vdec,
    GstVideoCodecFrame * out_frame)
{
  GstVaapiDecode *const decode = GST_VAAPIDECODE (vdec);
  GstVaapiSurfaceProxy *proxy;
  GstFlowReturn ret;
  const GstVaapiRectangle *crop_rect;
  GstVaapiVideoMeta *meta;
  guint flags, out_flags = 0;

  if (!GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (out_frame)) {
    proxy = gst_video_codec_frame_get_user_data (out_frame);

    /* reconfigure if un-cropped surface resolution changed */
    if (is_surface_resolution_changed (vdec, GST_VAAPI_SURFACE_PROXY_SURFACE (proxy)))
      gst_vaapidecode_negotiate (decode);

    gst_vaapi_surface_proxy_set_destroy_notify (proxy,
        (GDestroyNotify) gst_vaapidecode_release, gst_object_ref (decode));

    ret = gst_video_decoder_allocate_output_frame (vdec, out_frame);
    if (ret != GST_FLOW_OK)
      goto error_create_buffer;

    meta = gst_buffer_get_vaapi_video_meta (out_frame->output_buffer);
    if (!meta)
      goto error_get_meta;
    gst_vaapi_video_meta_set_surface_proxy (meta, proxy);

    flags = gst_vaapi_surface_proxy_get_flags (proxy);
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_CORRUPTED)
      out_flags |= GST_BUFFER_FLAG_CORRUPTED;
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_INTERLACED) {
      out_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_TFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_TFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_RFF)
        out_flags |= GST_VIDEO_BUFFER_FLAG_RFF;
      if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_ONEFIELD)
        out_flags |= GST_VIDEO_BUFFER_FLAG_ONEFIELD;
    }
    GST_BUFFER_FLAG_SET (out_frame->output_buffer, out_flags);

#if GST_CHECK_VERSION(1,5,0)
    /* First-in-bundle flag only appeared in 1.5 dev */
    if (flags & GST_VAAPI_SURFACE_PROXY_FLAG_FFB) {
      GST_BUFFER_FLAG_SET (out_frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE);
    }
#endif

    crop_rect = gst_vaapi_surface_proxy_get_crop_rect (proxy);
    if (crop_rect) {
      GstVideoCropMeta *const crop_meta =
          gst_buffer_add_video_crop_meta (out_frame->output_buffer);
      if (crop_meta) {
        crop_meta->x = crop_rect->x;
        crop_meta->y = crop_rect->y;
        crop_meta->width = crop_rect->width;
        crop_meta->height = crop_rect->height;
      }
    }
#if (USE_GLX || USE_EGL)
    if (decode->has_texture_upload_meta)
      gst_buffer_ensure_texture_upload_meta (out_frame->output_buffer);
#endif
  }

  ret = gst_video_decoder_finish_frame (vdec, out_frame);
  if (ret != GST_FLOW_OK)
    goto error_commit_buffer;

  gst_video_codec_frame_unref (out_frame);
  return GST_FLOW_OK;

  /* ERRORS */
error_create_buffer:
  {
    const GstVaapiID surface_id =
        gst_vaapi_surface_get_id (GST_VAAPI_SURFACE_PROXY_SURFACE (proxy));

    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to create sink buffer"),
        ("video sink failed to create video buffer for proxy'ed "
            "surface %" GST_VAAPI_ID_FORMAT, GST_VAAPI_ID_ARGS (surface_id)));
    gst_video_decoder_drop_frame (vdec, out_frame);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_ERROR;
  }
error_get_meta:
  {
    GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
        ("Failed to get vaapi video meta attached to video buffer"),
        ("Failed to get vaapi video meta attached to video buffer"));
    gst_video_decoder_drop_frame (vdec, out_frame);
    gst_video_codec_frame_unref (out_frame);
    return GST_FLOW_ERROR;
  }
error_commit_buffer:
  {
    GST_INFO_OBJECT (decode, "downstream element rejected the frame (%s [%d])",
        gst_flow_get_name (ret), ret);
    gst_video_codec_frame_unref (out_frame);
    return ret;
  }
}
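
Downstream elements consume the metadata attached above rather than the raw surface. A short sketch of how a sink or filter might read back the crop rectangle and the interlacing/corruption flags (a hypothetical helper, not part of gstreamer-vaapi):

#include <gst/video/video.h>

/* Look up the GstVideoCropMeta and buffer flags that
 * gst_vaapidecode_push_decoded_frame() may have attached. */
static void
inspect_decoded_buffer (GstBuffer * buf)
{
  GstVideoCropMeta *const crop = gst_buffer_get_video_crop_meta (buf);

  if (crop)
    GST_INFO ("visible region %ux%u at (%u,%u)",
        crop->width, crop->height, crop->x, crop->y);

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_INTERLACED))
    GST_INFO ("interlaced, TFF=%d RFF=%d",
        GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF),
        GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_RFF));

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_CORRUPTED))
    GST_INFO ("frame flagged as corrupted");
}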
Example #28
static GstFlowReturn
theora_enc_handle_frame (GstVideoEncoder * benc, GstVideoCodecFrame * frame)
{
    GstTheoraEnc *enc;
    ogg_packet op;
    GstClockTime timestamp, running_time;
    GstFlowReturn ret;
    gboolean force_keyframe;

    enc = GST_THEORA_ENC (benc);

    /* we keep track of two timelines.
     * - The timestamps from the incoming buffers, which we copy to the outgoing
     *   encoded buffers as-is. We need to do this as we simply forward the
     *   newsegment events.
     * - The running_time of the buffers, which we use to construct the granulepos
     *   in the packets.
     */
    timestamp = frame->pts;

    /* incoming buffers are clipped, so this should be positive */
    running_time =
        gst_segment_to_running_time (&GST_VIDEO_ENCODER_INPUT_SEGMENT (enc),
                                     GST_FORMAT_TIME, timestamp);
    g_return_val_if_fail (GST_CLOCK_TIME_IS_VALID (running_time)
                          || !GST_CLOCK_TIME_IS_VALID (timestamp), GST_FLOW_ERROR);

    GST_OBJECT_LOCK (enc);
    if (enc->bitrate_changed) {
        long int bitrate = enc->video_bitrate;

        th_encode_ctl (enc->encoder, TH_ENCCTL_SET_BITRATE, &bitrate,
                       sizeof (long int));
        enc->bitrate_changed = FALSE;
    }

    if (enc->quality_changed) {
        long int quality = enc->video_quality;

        th_encode_ctl (enc->encoder, TH_ENCCTL_SET_QUALITY, &quality,
                       sizeof (long int));
        enc->quality_changed = FALSE;
    }

    /* see if we need to schedule a keyframe */
    force_keyframe = GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame);
    GST_OBJECT_UNLOCK (enc);

    if (enc->packetno == 0) {
        /* no packets written yet, setup headers */
        GstCaps *caps;
        GstBuffer *buf;
        GList *buffers = NULL;
        int result;
        GstVideoCodecState *state;

        enc->granulepos_offset = 0;
        enc->timestamp_offset = 0;

        GST_DEBUG_OBJECT (enc, "output headers");
        /* Theora streams begin with three headers; the initial header (with
           most of the codec setup parameters) which is mandated by the Ogg
           bitstream spec.  The second header holds any comment fields.  The
           third header holds the bitstream codebook.  We merely need to
           make the headers, then pass them to libtheora one at a time;
           libtheora handles the additional Ogg bitstream constraints */

        /* create the remaining theora headers */
        th_comment_clear (&enc->comment);
        th_comment_init (&enc->comment);

        while ((result =
                    th_encode_flushheader (enc->encoder, &enc->comment, &op)) > 0) {
            buf = theora_enc_buffer_from_header_packet (enc, &op);
            buffers = g_list_prepend (buffers, buf);
        }
        if (result < 0) {
            g_list_foreach (buffers, (GFunc) gst_buffer_unref, NULL);
            g_list_free (buffers);
            goto encoder_disabled;
        }

        buffers = g_list_reverse (buffers);

        /* mark buffers and put on caps */
        caps = gst_caps_new_empty_simple ("video/x-theora");
        caps = theora_set_header_on_caps (caps, buffers);
        state = gst_video_encoder_set_output_state (benc, caps, enc->input_state);

        GST_DEBUG ("here are the caps: %" GST_PTR_FORMAT, state->caps);

        gst_video_codec_state_unref (state);

        gst_video_encoder_negotiate (GST_VIDEO_ENCODER (enc));

        gst_video_encoder_set_headers (benc, buffers);

        theora_enc_reset_ts (enc, running_time, frame->presentation_frame_number);
    }

    {
        th_ycbcr_buffer ycbcr;
        gint res;
        GstVideoFrame vframe;

        if (force_keyframe) {
            theora_enc_reset (enc);
            theora_enc_reset_ts (enc, running_time, frame->presentation_frame_number);
        }

        if (enc->multipass_cache_fd
                && enc->multipass_mode == MULTIPASS_MODE_SECOND_PASS) {
            if (!theora_enc_read_multipass_cache (enc)) {
                ret = GST_FLOW_ERROR;
                goto multipass_read_failed;
            }
        }

        gst_video_frame_map (&vframe, &enc->input_state->info, frame->input_buffer,
                             GST_MAP_READ);
        theora_enc_init_buffer (ycbcr, &vframe);

        res = th_encode_ycbcr_in (enc->encoder, ycbcr);
        gst_video_frame_unmap (&vframe);

        /* none of the failure cases can happen here */
        g_assert (res == 0);

        if (enc->multipass_cache_fd
                && enc->multipass_mode == MULTIPASS_MODE_FIRST_PASS) {
            if (!theora_enc_write_multipass_cache (enc, FALSE, FALSE)) {
                ret = GST_FLOW_ERROR;
                goto multipass_write_failed;
            }
        }

        ret = GST_FLOW_OK;
        while (th_encode_packetout (enc->encoder, 0, &op)) {
            ret = theora_push_packet (enc, &op);
            if (ret != GST_FLOW_OK)
                goto beach;
        }
    }

beach:
    gst_video_codec_frame_unref (frame);
    return ret;

    /* ERRORS */
multipass_read_failed:
    {
        gst_video_codec_frame_unref (frame);
        return ret;
    }
multipass_write_failed:
    {
        gst_video_codec_frame_unref (frame);
        return ret;
    }
encoder_disabled:
    {
        gst_video_codec_frame_unref (frame);
        GST_ELEMENT_ERROR (enc, STREAM, ENCODE, (NULL),
                           ("libtheora has been compiled with the encoder disabled"));
        return GST_FLOW_ERROR;
    }
}
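
The header branch above hands the three Theora headers to theora_set_header_on_caps() before negotiating. The underlying GStreamer convention is to flag each header buffer and publish the list on the caps as a "streamheader" array; the sketch below illustrates that convention in simplified form (it is not the exact theoraenc helper):

#include <gst/gst.h>

/* Simplified streamheader convention: mark each header buffer and
 * expose the list in the caps so muxers and payloaders can re-emit
 * the headers on demand. */
static GstCaps *
set_stream_headers (GstCaps * caps, GList * buffers)
{
  GstStructure *structure;
  GValue array = G_VALUE_INIT;
  GValue value = G_VALUE_INIT;
  GList *walk;

  caps = gst_caps_make_writable (caps);
  structure = gst_caps_get_structure (caps, 0);
  g_value_init (&array, GST_TYPE_ARRAY);

  for (walk = buffers; walk != NULL; walk = walk->next) {
    GstBuffer *const buf = walk->data;

    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
    g_value_init (&value, GST_TYPE_BUFFER);
    gst_value_set_buffer (&value, buf);
    gst_value_array_append_value (&array, &value);
    g_value_unset (&value);
  }

  gst_structure_set_value (structure, "streamheader", &array);
  g_value_unset (&array);
  return caps;
}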
Example #29
static GstVaapiDecoderStatus
decode_step (GstVaapiDecoder * decoder)
{
  GstVaapiParserState *const ps = &decoder->parser_state;
  GstVaapiDecoderStatus status;
  GstBuffer *buffer;
  gboolean got_frame;
  guint got_unit_size, input_size;

  status = gst_vaapi_decoder_check_status (decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  /* Fill adapter with all buffers we have in the queue */
  for (;;) {
    buffer = pop_buffer (decoder);
    if (!buffer)
      break;

    ps->at_eos = GST_BUFFER_IS_EOS (buffer);
    if (!ps->at_eos)
      gst_adapter_push (ps->input_adapter, buffer);
    else
      gst_buffer_unref (buffer);
  }

  /* Parse and decode all decode units */
  input_size = gst_adapter_available (ps->input_adapter);
  if (input_size == 0) {
    if (ps->at_eos)
      return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
  }

  do {
    if (!ps->current_frame) {
      ps->current_frame = g_slice_new0 (GstVideoCodecFrame);
      if (!ps->current_frame)
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
      ps->current_frame->ref_count = 1;
      ps->current_frame->system_frame_number = ps->current_frame_number++;
    }

    status = do_parse (decoder, ps->current_frame, ps->input_adapter,
        ps->at_eos, &got_unit_size, &got_frame);
    GST_DEBUG ("parse frame (status = %d)", status);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
      if (status == GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA && ps->at_eos)
        status = GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
      break;
    }

    if (got_unit_size > 0) {
      buffer = gst_adapter_take_buffer (ps->input_adapter, got_unit_size);
      input_size -= got_unit_size;

      if (gst_adapter_available (ps->output_adapter) == 0) {
        ps->current_frame->pts = gst_adapter_prev_pts (ps->input_adapter, NULL);
      }
      gst_adapter_push (ps->output_adapter, buffer);
    }

    if (got_frame) {
      ps->current_frame->input_buffer =
          gst_adapter_take_buffer (ps->output_adapter,
          gst_adapter_available (ps->output_adapter));

      status = do_decode (decoder, ps->current_frame);
      GST_DEBUG ("decode frame (status = %d)", status);

      gst_video_codec_frame_unref (ps->current_frame);
      ps->current_frame = NULL;
      break;
    }
  } while (input_size > 0);
  return status;
}
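
decode_step() is organized around GstAdapter: raw input pools up in input_adapter, parsed units migrate to output_adapter, and a complete frame is extracted with a single gst_adapter_take_buffer() call. The core push/available/take cycle, reduced to a self-contained sketch:

#include <gst/base/gstadapter.h>

/* Accumulate input and extract fixed-size units, mirroring the
 * adapter usage in decode_step(). gst_adapter_push() takes
 * ownership of the pushed buffer. */
static void
drain_units (GstAdapter * adapter, GstBuffer * input, gsize unit_size)
{
  gst_adapter_push (adapter, input);

  while (gst_adapter_available (adapter) >= unit_size) {
    GstBuffer *unit = gst_adapter_take_buffer (adapter, unit_size);

    /* ... hand the unit to a parser or decoder here ... */
    gst_buffer_unref (unit);
  }
}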
Example #30
static GstFlowReturn
gst_mpeg2dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder);
  GstBuffer *buf = frame->input_buffer;
  GstMapInfo minfo;
  const mpeg2_info_t *info;
  mpeg2_state_t state;
  gboolean done = FALSE;
  GstFlowReturn ret = GST_FLOW_OK;

  GST_LOG_OBJECT (mpeg2dec, "received frame %d, timestamp %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
      frame->system_frame_number,
      GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->duration));

  gst_buffer_ref (buf);
  if (!gst_buffer_map (buf, &minfo, GST_MAP_READ)) {
    GST_ERROR_OBJECT (mpeg2dec, "Failed to map input buffer");
    /* drop the ref taken above and release the frame before bailing out */
    gst_buffer_unref (buf);
    gst_video_codec_frame_unref (frame);
    return GST_FLOW_ERROR;
  }

  info = mpeg2dec->info;

  GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer");
  mpeg2_buffer (mpeg2dec->decoder, minfo.data, minfo.data + minfo.size);
  GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer done");

  while (!done) {
    GST_LOG_OBJECT (mpeg2dec, "calling parse");
    state = mpeg2_parse (mpeg2dec->decoder);
    GST_DEBUG_OBJECT (mpeg2dec, "parse state %d", state);

    switch (state) {
#if MPEG2_RELEASE >= MPEG2_VERSION (0, 5, 0)
      case STATE_SEQUENCE_MODIFIED:
        GST_DEBUG_OBJECT (mpeg2dec, "sequence modified");
        mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE;
        gst_mpeg2dec_clear_buffers (mpeg2dec);
        /* fall through */
#endif
      case STATE_SEQUENCE:
        ret = handle_sequence (mpeg2dec, info);
        /* if there is an error handling the sequence
         * reset the decoder, maybe something more elegant
         * could be done.
         */
        if (ret == GST_FLOW_ERROR) {
          GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
              ("decoding error"), ("Bad sequence header"), ret);
          gst_video_decoder_drop_frame (decoder, frame);
          gst_mpeg2dec_flush (decoder);
          goto done;
        }
        break;
      case STATE_SEQUENCE_REPEATED:
        GST_DEBUG_OBJECT (mpeg2dec, "sequence repeated");
        break;
      case STATE_GOP:
        GST_DEBUG_OBJECT (mpeg2dec, "gop");
        break;
      case STATE_PICTURE:
        ret = handle_picture (mpeg2dec, info, frame);
        break;
      case STATE_SLICE_1ST:
        GST_LOG_OBJECT (mpeg2dec, "1st slice of frame encountered");
        break;
      case STATE_PICTURE_2ND:
        GST_LOG_OBJECT (mpeg2dec,
            "Second picture header encountered. Decoding 2nd field");
        break;
#if MPEG2_RELEASE >= MPEG2_VERSION (0, 4, 0)
      case STATE_INVALID_END:
        GST_DEBUG_OBJECT (mpeg2dec, "invalid end");
        /* fall through */
#endif
      case STATE_END:
        GST_DEBUG_OBJECT (mpeg2dec, "end");
        /* fall through */
      case STATE_SLICE:
        GST_DEBUG_OBJECT (mpeg2dec, "display_fbuf:%p, discard_fbuf:%p",
            info->display_fbuf, info->discard_fbuf);
        if (info->display_fbuf && info->display_fbuf->id) {
          ret = handle_slice (mpeg2dec, info);
        } else {
          GST_DEBUG_OBJECT (mpeg2dec, "no picture to display");
        }
        if (info->discard_fbuf && info->discard_fbuf->id)
          gst_mpeg2dec_discard_buffer (mpeg2dec,
              GPOINTER_TO_INT (info->discard_fbuf->id) - 1);
        if (state != STATE_SLICE) {
          gst_mpeg2dec_clear_buffers (mpeg2dec);
        }
        break;
      case STATE_BUFFER:
        done = TRUE;
        break;
        /* error */
      case STATE_INVALID:
        GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
            ("decoding error"), ("Reached libmpeg2 invalid state"), ret);
        continue;
      default:
        GST_ERROR_OBJECT (mpeg2dec, "Unknown libmpeg2 state %d, FIXME", state);
        ret = GST_FLOW_OK;
        gst_video_codec_frame_unref (frame);
        goto done;
    }

    if (ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (mpeg2dec, "exit loop, reason %s",
          gst_flow_get_name (ret));
      break;
    }
  }

  gst_video_codec_frame_unref (frame);

done:
  gst_buffer_unmap (buf, &minfo);
  gst_buffer_unref (buf);
  return ret;
}
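
For reference, the libmpeg2 calling convention the handler above drives is small: feed a byte range with mpeg2_buffer(), then call mpeg2_parse() repeatedly until it returns STATE_BUFFER to request more input. A bare sketch outside GStreamer (assuming a valid decoder handle; error paths omitted):

#include <stdint.h>
#include <stddef.h>
#include <mpeg2dec/mpeg2.h>

/* Push one chunk of MPEG-2 elementary stream and drain all state
 * changes until libmpeg2 asks for more data. */
static void
pump_mpeg2 (mpeg2dec_t * dec, uint8_t * data, size_t size)
{
  const mpeg2_info_t *const info = mpeg2_info (dec);
  mpeg2_state_t state;

  mpeg2_buffer (dec, data, data + size);

  while ((state = mpeg2_parse (dec)) != STATE_BUFFER) {
    if ((state == STATE_SLICE || state == STATE_END) && info->display_fbuf) {
      /* a decoded picture is available in info->display_fbuf->buf */
    }
  }
}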