Example #1
static GstFlowReturn gst_libde265_dec_handle_frame (VIDEO_DECODER_BASE * parse,
    VIDEO_FRAME * frame)
{
    GstFlowReturn ret = GST_FLOW_OK;
    GstLibde265Dec *dec = GST_LIBDE265_DEC (parse);
    
    const struct de265_image *img = de265_get_next_picture(dec->ctx);
    if (img == NULL) {
        // need more data
        return GST_FLOW_OK;
    }
    
    ret = ALLOC_OUTPUT_FRAME(parse, frame);
    if (ret != GST_FLOW_OK) {
        return ret;
    }
    
    uint8_t *dest;
#if GST_CHECK_VERSION(1,0,0)
    GstMapInfo info;
    if (!gst_buffer_map(frame->output_buffer, &info, GST_MAP_WRITE)) {
        return GST_FLOW_ERROR;
    }
    
    dest = info.data;
#else
    dest = GST_BUFFER_DATA(frame->src_buffer);
#endif
    
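    // copy the three decoded planes (Y, Cb, Cr) into the packed output buffer,
    // dropping any row padding that libde265 reports via the plane stride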
    int plane;
    for (plane=0; plane<3; plane++) {
        int stride;
        int width = de265_get_image_width(img, plane);
        int height = de265_get_image_height(img, plane);
        const uint8_t *src = de265_get_image_plane(img, plane, &stride);
        if (stride == width) {
            memcpy(dest, src, height * stride);
            dest += (height * stride);
        } else {
            while (height--) {
                memcpy(dest, src, width);
                src += stride;
                dest += width;
            }
        }
    }
#if GST_CHECK_VERSION(1,0,0)
    gst_buffer_unmap(frame->output_buffer, &info);
    frame->pts = (GstClockTime) de265_get_image_PTS(img);
#endif
    
    return FINISH_FRAME (parse, frame);
}
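
The snippet above relies on compatibility macros (VIDEO_DECODER_BASE, VIDEO_FRAME, ALLOC_OUTPUT_FRAME, FINISH_FRAME) whose definitions are not shown. A minimal sketch of how they might map onto the GStreamer 1.0 base-class API, assuming a 1.0-only build (the 0.10 GstBaseVideoDecoder mappings are omitted):

/* Hypothetical compatibility mapping for a GStreamer 1.0-only build */
#include <gst/video/gstvideodecoder.h>

#define VIDEO_DECODER_BASE          GstVideoDecoder
#define VIDEO_FRAME                 GstVideoCodecFrame
#define ALLOC_OUTPUT_FRAME(dec, f)  gst_video_decoder_allocate_output_frame (dec, f)
#define FINISH_FRAME(dec, f)        gst_video_decoder_finish_frame (dec, f)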
Example #2
static GstFlowReturn
_gst_libde265_return_image (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame, const struct de265_image *img)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  struct GstLibde265FrameRef *ref;
  GstFlowReturn result;
  GstVideoFrame outframe;
  GstVideoCodecFrame *out_frame;
  int frame_number;
  int plane;

  ref = (struct GstLibde265FrameRef *) de265_get_image_plane_user_data (img, 0);
  if (ref != NULL) {
    /* decoder is using direct rendering */
    out_frame = gst_video_codec_frame_ref (ref->frame);
    if (frame != NULL) {
      gst_video_codec_frame_unref (frame);
    }
    gst_buffer_replace (&out_frame->output_buffer, ref->buffer);
    gst_buffer_replace (&ref->buffer, NULL);
    return gst_video_decoder_finish_frame (decoder, out_frame);
  }

  result =
      _gst_libde265_image_available (decoder, de265_get_image_width (img, 0),
      de265_get_image_height (img, 0));
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to notify about available image");
    return result;
  }

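  /* libde265 user data carries the codec frame's system frame number offset
   * by one, so a stored value of 0 means no frame is attached */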
  frame_number = (uintptr_t) de265_get_image_user_data (img) - 1;
  if (frame_number != -1) {
    out_frame = gst_video_decoder_get_frame (decoder, frame_number);
  } else {
    out_frame = NULL;
  }
  if (frame != NULL) {
    gst_video_codec_frame_unref (frame);
  }

  if (out_frame == NULL) {
    GST_ERROR_OBJECT (dec, "No frame available to return");
    return GST_FLOW_ERROR;
  }

  result = gst_video_decoder_allocate_output_frame (decoder, out_frame);
  if (result != GST_FLOW_OK) {
    GST_ERROR_OBJECT (dec, "Failed to allocate output frame");
    return result;
  }

  g_assert (dec->output_state != NULL);
  if (!gst_video_frame_map (&outframe, &dec->output_state->info,
          out_frame->output_buffer, GST_MAP_WRITE)) {
    GST_ERROR_OBJECT (dec, "Failed to map output buffer");
    return GST_FLOW_ERROR;
  }

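  /* copy each plane, honouring both the libde265 source stride and the
   * destination stride reported by the mapped GstVideoFrame */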
  for (plane = 0; plane < 3; plane++) {
    int width = de265_get_image_width (img, plane);
    int height = de265_get_image_height (img, plane);
    int srcstride = width;
    int dststride = GST_VIDEO_FRAME_COMP_STRIDE (&outframe, plane);
    const uint8_t *src = de265_get_image_plane (img, plane, &srcstride);
    uint8_t *dest = GST_VIDEO_FRAME_COMP_DATA (&outframe, plane);
    if (srcstride == width && dststride == width) {
      memcpy (dest, src, height * width);
    } else {
      while (height--) {
        memcpy (dest, src, width);
        src += srcstride;
        dest += dststride;
      }
    }
  }
  gst_video_frame_unmap (&outframe);
  return gst_video_decoder_finish_frame (decoder, out_frame);
}
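
The helper above is typically driven from the decoder's handle_frame vfunc. A minimal sketch, assuming GStreamer 1.0 types and that dec->ctx is the de265_decoder_context used elsewhere in these examples; the step that pushes the encoded data is elided:

static GstFlowReturn
gst_libde265_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstLibde265Dec *dec = GST_LIBDE265_DEC (decoder);
  const struct de265_image *img;

  /* ... push the data from frame->input_buffer into dec->ctx here ... */

  img = de265_get_next_picture (dec->ctx);
  if (img == NULL) {
    /* no decoded picture available yet; the base class keeps the frame queued */
    return GST_FLOW_OK;
  }

  /* hand the decoded image and the pending codec frame to the helper above */
  return _gst_libde265_return_image (decoder, frame, img);
}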
Example #3
static int ff_libde265dec_decode(AVCodecContext *avctx,
                                 void *data, int *got_frame, AVPacket *avpkt)
{
    DE265Context *ctx = (DE265Context *) avctx->priv_data;
    AVFrame *picture = (AVFrame *) data;
    const struct de265_image *img;
    de265_error err;
    int ret;
    int64_t pts = AV_NOPTS_VALUE;   // stays defined even for empty (flush) packets
#if LIBDE265_NUMERIC_VERSION >= 0x00050000
    int more = 0;
#endif

    const uint8_t* src[4];
    int stride[4];

    if (avpkt->size > 0) {
        if (avpkt->pts != AV_NOPTS_VALUE) {
            pts = avpkt->pts;
        } else {
            pts = avctx->reordered_opaque;
        }

        // the packet carries 4-byte NAL length prefixes: rewrite them in place to
        // start codes (libde265 < 0.5) or push each NAL payload directly (>= 0.5)
        uint8_t* avpkt_data = avpkt->data;
        uint8_t* avpkt_end = avpkt->data + avpkt->size;
        while (avpkt_data + 4 <= avpkt_end) {
            int nal_size = AV_RB32(avpkt_data);
#if LIBDE265_NUMERIC_VERSION < 0x00050000
            AV_WB32(avpkt_data, 0x00000001);
#else
            err = de265_push_NAL(ctx->decoder, avpkt_data + 4, nal_size, pts, NULL);
            if (err != DE265_OK) {
                const char *error = de265_get_error_text(err);
                av_log(avctx, AV_LOG_ERROR, "Failed to push data: %s\n", error);
                return AVERROR_INVALIDDATA;
            }
#endif
            avpkt_data += 4 + nal_size;
        }
#if LIBDE265_NUMERIC_VERSION >= 0x00050000
    } else {
        de265_flush_data(ctx->decoder);
#endif
    }

#if LIBDE265_NUMERIC_VERSION < 0x00050000
    // insert input packet PTS into sorted queue
    if (ctx->pts_queue_len < DE265_MAX_PTS_QUEUE) {
        int pos=0;
        // check the bounds before reading the queue slot
        while (pos < ctx->pts_queue_len &&
               ctx->pts_queue[pos] < pts) {
            pos++;
        }

        if (pos < ctx->pts_queue_len) {
            memmove(&ctx->pts_queue[pos+1], &ctx->pts_queue[pos],
                sizeof(int64_t) * (ctx->pts_queue_len - pos));
        }

        ctx->pts_queue[pos] = pts;
        ctx->pts_queue_len++;
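        // remember the deepest queue fill observed; it is used below as the
        // minimum number of buffered PTS values required before a picture is output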
        if (ctx->pts_queue_len > ctx->pts_min_queue_len) {
            ctx->pts_min_queue_len = ctx->pts_queue_len;
        }
    }

    err = de265_decode_data(ctx->decoder, avpkt->data, avpkt->size);
#else
    // decode as much as possible
    do {
        err = de265_decode(ctx->decoder, &more);
    } while (more && err == DE265_OK);
#endif

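    // only a genuine decode error is fatal; "image buffer full" and
    // "waiting for input data" just mean the decoder should be called again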
    switch (err) {
    case DE265_OK:
    case DE265_ERROR_IMAGE_BUFFER_FULL:
#if LIBDE265_NUMERIC_VERSION >= 0x00050000
    case DE265_ERROR_WAITING_FOR_INPUT_DATA:
#endif
        break;

    default:
        {
            const char *error  = de265_get_error_text(err);

            av_log(avctx, AV_LOG_ERROR, "Failed to decode frame: %s\n", error);
            return AVERROR_INVALIDDATA;
        }
    }

    if ((img = de265_get_next_picture(ctx->decoder)) != NULL) {
        int width;
        int height;
        if (de265_get_chroma_format(img) != de265_chroma_420) {
            av_log(avctx, AV_LOG_ERROR, "Unsupported output colorspace (%d)\n",
                   de265_get_chroma_format(img));
            return AVERROR_INVALIDDATA;
        }

        width  = de265_get_image_width(img,0);
        height = de265_get_image_height(img,0);
        if (width != avctx->width || height != avctx->height) {
            if (avctx->width != 0)
                av_log(avctx, AV_LOG_INFO, "dimension change! %dx%d -> %dx%d\n",
                       avctx->width, avctx->height, width, height);

            if (av_image_check_size(width, height, 0, avctx)) {
                return AVERROR_INVALIDDATA;
            }

            avcodec_set_dimensions(avctx, width, height);
        }
#if LIBDE265_NUMERIC_VERSION < 0x00050000
        if (ctx->pts_queue_len < ctx->pts_min_queue_len) {
            // fill pts queue to ensure reordering works
            return avpkt->size;
        }
#endif

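        // set up the output AVFrame with the negotiated dimensions and pixel
        // format, then copy the decoded planes into it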
        picture->width = avctx->width;
        picture->height = avctx->height;
        picture->format = avctx->pix_fmt;
        if ((ret = av_frame_get_buffer(picture, 32)) < 0) {
            return ret;
        }

        // libde265 exposes three planes (Y, Cb, Cr); leave the fourth slot empty
        for (int i = 0; i < 3; i++) {
            src[i] = de265_get_image_plane(img, i, &stride[i]);
        }
        src[3] = NULL;
        stride[3] = 0;

        av_image_copy(picture->data, picture->linesize, src, stride,
                      avctx->pix_fmt, width, height);

        *got_frame = 1;

#if LIBDE265_NUMERIC_VERSION < 0x00050000
        // assign next PTS from queue
        if (ctx->pts_queue_len > 0) {
            picture->reordered_opaque = ctx->pts_queue[0];
            picture->pkt_pts = ctx->pts_queue[0];

            if (ctx->pts_queue_len>1) {
                memmove(&ctx->pts_queue[0], &ctx->pts_queue[1],
                    sizeof(int64_t) * (ctx->pts_queue_len-1));
            }

            ctx->pts_queue_len--;
        }
#else
        picture->reordered_opaque = de265_get_image_PTS(img);
        picture->pkt_pts = de265_get_image_PTS(img);
#endif
    }
    return avpkt->size;
}
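
The decode callback above uses libavcodec's legacy decode() API. A hedged sketch of how such a decoder might be described to libavcodec, assuming hypothetical ff_libde265dec_init and ff_libde265dec_free callbacks that create and destroy the DE265Context (they are not shown in the example):

#include <libavcodec/avcodec.h>

/* Sketch only: the .init and .close callbacks are assumptions, not part of the
 * example above. */
AVCodec ff_libde265_decoder = {
    .name           = "libde265",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_HEVC,
    .priv_data_size = sizeof(DE265Context),
    .init           = ff_libde265dec_init,
    .close          = ff_libde265dec_free,
    .decode         = ff_libde265dec_decode,
    .capabilities   = CODEC_CAP_DELAY,   /* frames are buffered for reordering */
};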