Example No. 1
static GstVaapiDecoderStatus
decode_picture (GstVaapiDecoderVp8 * decoder, const guchar * buf,
    guint buf_size)
{
  GstVaapiDecoderVp8Private *const priv = &decoder->priv;
  GstVaapiPicture *picture;
  GstVaapiDecoderStatus status;

  status = ensure_context (decoder);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  /* Create new picture */
  picture = GST_VAAPI_PICTURE_NEW (VP8, decoder);
  if (!picture) {
    GST_ERROR ("failed to allocate picture");
    return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
  }
  gst_vaapi_picture_replace (&priv->current_picture, picture);
  gst_vaapi_picture_unref (picture);

  status = ensure_quant_matrix (decoder, picture);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  status = ensure_probability_table (decoder, picture);
  if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
    return status;

  init_picture (decoder, picture);
  if (!fill_picture (decoder, picture))
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

  return decode_slice (decoder, picture, buf, buf_size);
}
Example No. 2
int h264_decode_nalu(DecodingContext_t *dc, const int64_t nalu_offset, const int64_t nalu_size)
{
    TRACE_1(H264, BLD_GREEN "h264_decode_nalu()" CLR_RESET);
    int retcode = FAILURE;

    // Go to the correct data offset
    //bitstream_goto_offset(dc->bitstr, nalu_offset);
    buffer_feed_manual(dc->bitstr, nalu_offset, nalu_size);

    // Check header validity
    if (nalu_parse_header(dc->bitstr, dc->active_nalu))
    {
        TRACE_1(H264, "decode: " BLD_GREEN "NALU_TYPE %i (at %lli) " CLR_RESET,
                dc->active_nalu->nal_unit_type,
                bitstream_get_absolute_byte_offset(dc->bitstr));

        // Decode NAL Unit content
        switch (dc->active_nalu->nal_unit_type)
        {
            case NALU_TYPE_SLICE: //////////////////////////////////////
            {
                TRACE_1(H264, "This decoder only support IDR slice decoding!");
            }
            break;

            case NALU_TYPE_IDR: ////////////////////////////////////////
            {
                TRACE_INFO(H264, "> " BLD_GREEN "decodeIDR(%i at %lli)" CLR_RESET,
                           dc->idrCounter, bitstream_get_absolute_byte_offset(dc->bitstr));

                nalu_clean_sample(dc->bitstr);
                dc->IdrPicFlag = true;

                if (decode_slice(dc))
                {
                    retcode = SUCCESS;
                    dc->errorCounter = 0;
                    dc->idrCounter++;
                    dc->frameCounter++;
                }
                else
                    dc->errorCounter++;
            }
            break;

            case NALU_TYPE_AUD: ////////////////////////////////////////
            {
                nalu_clean_sample(dc->bitstr);

                aud_t aud;
                if (decodeAUD(dc->bitstr, &aud))
                {
                    retcode = SUCCESS;
                }
                else
                    dc->errorCounter++;
            }
            break;

            case NALU_TYPE_SEI: ////////////////////////////////////////
            {
                nalu_clean_sample(dc->bitstr);

                if (dc->active_sei != NULL)
                    free(dc->active_sei);

                dc->active_sei = (sei_t*)calloc(1, sizeof(sei_t));
                if (dc->active_sei)
                {
                    if (decodeSEI(dc->bitstr, dc->active_sei))
                    {
                        retcode = SUCCESS;
                        printSEI(NULL);
                    }
                    else
                        dc->errorCounter++;
                }
            }
            break;

            case NALU_TYPE_SPS: ////////////////////////////////////////
            {
                nalu_clean_sample(dc->bitstr);

                retcode = decodeSPS_legacy(dc);
/*
                sps_t *sps = (sps_t*)calloc(1, sizeof(sps_t));
                if (sps)
                {
                    if (decodeSPS(dc->bitstr, sps))
                    {
                        dc->sps_array[sps->seq_parameter_set_id] = sps;

                        dc->active_sps = sps->seq_parameter_set_id;
                        dc->profile_idc = sps->profile_idc;
                        dc->ChromaArrayType = sps->ChromaArrayType;

                        // Init some quantization tables
                        computeLevelScale4x4(dc, sps);
                        computeLevelScale8x8(dc, sps);

                        // Macroblocks "table" allocation (on macroblock **mbs_data):
                        dc->PicSizeInMbs = sps->FrameHeightInMbs * sps->PicWidthInMbs;
                        dc->mb_array = (Macroblock_t**)calloc(dc->PicSizeInMbs, sizeof(Macroblock_t*));

                        //printSPS(sps);
                        retcode = SUCCESS;
                    }
                    else
                        dc->errorCounter++;
                }
*/
            }
            break;

            case NALU_TYPE_PPS: ////////////////////////////////////////
            {
                nalu_clean_sample(dc->bitstr);

                pps_t *pps = (pps_t*)calloc(1, sizeof(pps_t));
                if (pps)
                {
                    if (decodePPS(dc->bitstr, pps, dc->sps_array))
                    {
                        dc->pps_array[pps->pic_parameter_set_id] = pps;
                        dc->active_pps = pps->pic_parameter_set_id;
                        dc->entropy_coding_mode_flag = pps->entropy_coding_mode_flag;

                        //printPPS(pps, dc->sps_array);
                        retcode = SUCCESS;
                    }
                    else
                        dc->errorCounter++;
                }
            }
            break;

            default:
            {
                TRACE_ERROR(NALU, "Unsupported NAL Unit! (nal_unit_type %i)", dc->active_nalu->nal_unit_type);
                dc->errorCounter++;
            }
            break;
        }

        // Reset NAL Unit structure
        nalu_reset(dc->active_nalu);
    }
    else
    {
        retcode = FAILURE;
        dc->errorCounter++;
        TRACE_WARNING(NALU, "No valid NAL Unit to decode! (errorCounter = %i)", dc->errorCounter);
    }

    return retcode;
}
Example No. 3
// Decode the next NAL unit from the buffer. The buffer must start with a start code.
// 'length' is in/out: on input, the size of the input buffer; on output, the number of bytes consumed from it.
RetCode decode_nalu(uint8_t* data, uint32_t* length)
{
  nal_unit_t nalu;
  RetCode r;
  if (!data) // the caller signals the end of the file (or stream), so we can finish and tidy up the current decoding session
  {
 //   end_decoding(&pdd);
    return RET_SUCCESS;
  }

  r = parse_nal_unit(data, length, &nalu, 0);

  if (r != RET_SUCCESS)
  {
    LUD_DEBUG_ASSERT(0);
    return r;
  }

  switch (nalu.nal_unit_type)
  {
    case NALU_TYPE_IDR:
    {
      LUD_TRACE(TRACE_INFO, "Parsing an IDR\n");
      r = decode_slice(&pdd, &nalu);
      LUD_DEBUG_ASSERT(RET_SUCCESS == r);
      break;
    }
    case NALU_TYPE_SPS:
    {
      // Finish the previous pic decoding
     // end_picture(&pdd);
      LUD_TRACE(TRACE_INFO, "(decode.c) Parsing SPS\n");
      seq_parameter_set_rbsp_t* sps;
      r = parse_sps(&nalu, &sps);
      if (RET_SUCCESS != r)
      {
        LUD_TRACE(TRACE_ERROR, "(decode.c) Error %d while parsing SPS, nalu dropped...\n", r);
        break;
      }
      if (gdd.sps[sps->seq_parameter_set_id]!=0)
        release_sps(gdd.sps[sps->seq_parameter_set_id]);
      gdd.sps[sps->seq_parameter_set_id] = sps;
      break;
    }
    case NALU_TYPE_PPS:
    {
      // Finish the previous pic decoding
//      end_picture(&pdd);
      LUD_TRACE(TRACE_INFO, "(decode.c) Parsing PPS\n");
      pic_parameter_set_rbsp_t* pps;
      r = parse_pps(&nalu, &pps);
      if (RET_SUCCESS != r)
      {
        LUD_TRACE(TRACE_ERROR, "(decode.c) Error %d while parsing PPS, nalu dropped...\n", r);
        break;
      }
      if (gdd.pps[pps->pic_parameter_set_id]!=0)
        release_pps(gdd.pps[pps->pic_parameter_set_id]);
      gdd.pps[pps->pic_parameter_set_id] = pps;
      break;
    }
    default:
      LUD_TRACE(TRACE_INFO, "(decode.c) NALU type %d is unknown\n", nalu.nal_unit_type);
      LUD_DEBUG_ASSERT(0); // Unknown NAL Unit type !
      break;
  }

  return RET_SUCCESS;
}
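
The comment above decode_nalu() describes an in/out length contract: the caller passes the number of bytes still available and gets back the number of bytes consumed. A minimal driver loop under that contract might look like the following sketch; buffer, total_size and the helper name drive_decoder are assumptions for illustration, not part of the listing above.

/* Hypothetical caller sketch for decode_nalu(); 'buffer' and 'total_size'
 * stand in for data obtained elsewhere (e.g. a memory-mapped file). */
static RetCode drive_decoder(uint8_t* buffer, uint32_t total_size)
{
  uint32_t offset = 0;

  while (offset < total_size)
  {
    uint32_t length = total_size - offset;  /* in: bytes available */
    RetCode r = decode_nalu(buffer + offset, &length);
    if (r != RET_SUCCESS)
      return r;
    offset += length;                       /* out: bytes consumed */
  }

  return decode_nalu(NULL, NULL);           /* NULL data signals end of stream */
}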
Example No. 4
/*!
 * \brief Decode an H.264 bitstream.
 * \param *input_video A pointer to a VideoFile_t structure, containing all the information available about the current video file.
 * \param *output_directory The directory where to save exported pictures.
 * \param picture_format The picture file format.
 * \param picture_quality The quality we want for exported pictures [1;100].
 * \param picture_number The number of thumbnail(s) we want to extract.
 * \param picture_extractionmode The method of distribution for thumbnails extraction.
 * \return 1 if successful, 0 otherwise.
 *
 * This decoder is based on the 'ITU-T H.264' recommendation:
 * 'Advanced Video Coding for generic audiovisual services'
 * It also corresponds to the 'ISO/IEC 14496-10' international standard, part 10:
 * 'Advanced Video Coding'.
 *
 * You can download the H.264 specification for free on the ITU website:
 * http://www.itu.int/rec/T-REC-H.264
 *
 * This is the entry point for H.264 bitstream decoding. It initializes the DecodingContext,
 * then starts the decoding process, which loops over the NAL Units found in the bitstream.
 * Each NAL Unit is processed according to its content type.
 */
int h264_decode(VideoFile_t *input_video,
                const char *output_directory,
                const int picture_format,
                const int picture_quality,
                const int picture_number,
                const int picture_extractionmode)
{
    TRACE_INFO(H264, BLD_GREEN "h264_decode()\n" CLR_RESET);
    int retcode = FAILURE;

    // Init decoding context
    DecodingContext_t *dc = initDecodingContext(input_video);

    if (dc == NULL)
    {
        TRACE_ERROR(H264, "Unable to allocate DecodingContext_t, exiting decoder\n");
        return retcode;
    }
    else
    {
        // Init some quantization parameters
        computeNormAdjust(dc);

        // Init some export parameters
        //strncpy(dc->output_directory, output_directory, sizeof(output_directory));
        dc->output_format = picture_format;
        dc->picture_quality = picture_quality;
        dc->picture_number = picture_number;
        dc->picture_extractionmode = picture_extractionmode;

        // Start the decoding process
        dc->decoderRunning = true;
    }

    // Loop until end of file
    while (dc->decoderRunning)
    {
        // Load next NAL Unit
        retcode = buffer_feed_next_sample(dc->bitstr);

        // Check header validity
        if (nalu_parse_header(dc->bitstr, dc->active_nalu))
        {
            // Decode NAL Unit content
            switch (dc->active_nalu->nal_unit_type)
            {
            case NALU_TYPE_SLICE: // 1
            {
                TRACE_1(NALU, "This decoder only supports IDR slice decoding!\n");
            }
            break;

            case NALU_TYPE_IDR: // 5
            {
                dc->IdrPicFlag = true;
                nalu_clean_sample(dc->bitstr);

                TRACE_INFO(MAIN, "> " BLD_GREEN "decodeIDR(%i)\n" CLR_RESET, dc->idrCounter);

                if (decode_slice(dc))
                {
                    retcode = SUCCESS;
                    dc->errorCounter = 0;
                    dc->idrCounter++;
                    dc->frameCounter++;
                }
                else
                    dc->errorCounter++;

                dc->IdrPicFlag = false;
            }
            break;

            case NALU_TYPE_SEI: // 6
            {
                nalu_clean_sample(dc->bitstr);

                if (decodeSEI(dc))
                {
                    retcode = SUCCESS;
                    printSEI(dc);
                }
                else
                    dc->errorCounter++;
            }
            break;

            case NALU_TYPE_SPS: // 7
            {
                nalu_clean_sample(dc->bitstr);

                if (decodeSPS(dc))
                {
                    retcode = SUCCESS;
                    printSPS(dc);
                }
                else
                    dc->errorCounter++;
            }
            break;

            case NALU_TYPE_PPS: // 8
            {
                nalu_clean_sample(dc->bitstr);

                if (decodePPS(dc))
                {
                    retcode = SUCCESS;
                    printPPS(dc);
                }
                else
                    dc->errorCounter++;
            }
            break;

            default:
            {
                dc->errorCounter++;
                TRACE_ERROR(NALU, "Unsupported NAL Unit! (nal_unit_type %i)\n", dc->active_nalu->nal_unit_type);
            }
            break;
            }

            // Reset NAL Unit structure
            nalu_reset(dc->active_nalu);
        }
        else
        {
            dc->errorCounter++;
            TRACE_WARNING(NALU, "No valid NAL Unit to decode! (errorCounter = %i)\n", dc->errorCounter);
        }

        if (dc->idrCounter == picture_number)
        {
            TRACE_INFO(H264, ">> " BLD_YELLOW "Decoding of %i IDR successfull!\n" CLR_RESET, dc->idrCounter);
            TRACE_INFO(H264, "H.264 decoding ended\n");
            retcode = SUCCESS;
            dc->decoderRunning = false;
        }

        if (dc->errorCounter > 64 || retcode == FAILURE)
        {
            TRACE_ERROR(H264, "Error inside NAL Unit decoding loop! (errorCounter = %i) (current nal_unit_type = %i)\n", dc->errorCounter, dc->active_nalu->nal_unit_type);
            TRACE_ERROR(H264, "H.264 decoding aborted\n");
            retcode = FAILURE;
            dc->decoderRunning = false;
        }
    }

    // Destroy decoding context
    freeDecodingContext(&dc);

    // Exit decoder
    return retcode;
}
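
A hypothetical call site for h264_decode(), based only on the parameter list documented above; the way the VideoFile_t handle is obtained and the PICTURE_JPG / PICTURE_UNFILTERED constants are placeholder names used for illustration, not confirmed project API.

/* Hypothetical usage sketch; open_video_file(), PICTURE_JPG and
   PICTURE_UNFILTERED are placeholder names, not confirmed API. */
static void extract_thumbnails(VideoFile_t *video)
{
    /* Extract 3 thumbnails as JPEG at quality 75 into /tmp/thumbs */
    if (h264_decode(video, "/tmp/thumbs", PICTURE_JPG, 75, 3,
                    PICTURE_UNFILTERED) == SUCCESS)
        TRACE_INFO(MAIN, "thumbnail extraction succeeded\n");
    else
        TRACE_ERROR(MAIN, "thumbnail extraction failed\n");
}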
Example No. 5
int ff_h263_decode_frame(AVCodecContext *avctx,
                             void *data, int *data_size,
                             const uint8_t *buf, int buf_size)
{
    MpegEncContext *s = avctx->priv_data;
    int ret;
    AVFrame *pict = data;

#ifdef PRINT_FRAME_TIME
uint64_t time= rdtsc();
#endif
#ifdef DEBUG
    av_log(avctx, AV_LOG_DEBUG, "*****frame %d size=%d\n", avctx->frame_number, buf_size);
    if(buf_size>0)
        av_log(avctx, AV_LOG_DEBUG, "bytes=%x %x %x %x\n", buf[0], buf[1], buf[2], buf[3]);
#endif
    s->flags= avctx->flags;
    s->flags2= avctx->flags2;

    /* no supplementary picture */
    if (buf_size == 0) {
        /* special case for last picture */
        if (s->low_delay==0 && s->next_picture_ptr) {
            *pict= *(AVFrame*)s->next_picture_ptr;
            s->next_picture_ptr= NULL;

            *data_size = sizeof(AVFrame);
        }

        return 0;
    }

retry:

    init_get_bits(&s->gb, buf, buf_size*8);
    s->bitstream_buffer_size=0;

    if (!s->context_initialized) {
        if (MPV_common_init(s) < 0) //we need the idct permutation for reading a custom matrix
            return -1;
    }

    /* We need to set current_picture_ptr before reading the header,
     * otherwise we cannot store anything in there */
    if(s->current_picture_ptr==NULL || s->current_picture_ptr->data[0]){
        int i= ff_find_unused_picture(s, 0);
        s->current_picture_ptr= &s->picture[i];
    }

    ret = flv_h263_decode_picture_header(s);

    if(ret==FRAME_SKIPPED) return get_consumed_bytes(s, buf_size);

    /* skip if the header was thrashed */
    if (ret < 0){
        av_log(s->avctx, AV_LOG_ERROR, "header damaged\n");
        return -1;
    }

    avctx->has_b_frames= !s->low_delay;

        /* After the H.263 & MPEG-4 header decode we have the height, width, */
        /* and other parameters, so we can now initialize the picture.       */
        /* FIXME: The way the H.263 decoder is evolving, it should have      */
        /* an H263EncContext                                                  */

    if (   s->width  != avctx->coded_width
        || s->height != avctx->coded_height) {
        /* H.263 could change picture size any time */
        ParseContext pc= s->parse_context; //FIXME move these demuxing hacks to avformat
        s->parse_context.buffer=0;
        MPV_common_end(s);
        s->parse_context= pc;
    }
    if (!s->context_initialized) {
        avcodec_set_dimensions(avctx, s->width, s->height);

        goto retry;
    }

    if((s->codec_id==CODEC_ID_H263 || s->codec_id==CODEC_ID_H263P))
        s->gob_index = ff_h263_get_gob_height(s);

    // for hurry_up==5
    s->current_picture.pict_type= s->pict_type;
    s->current_picture.key_frame= s->pict_type == FF_I_TYPE;

    /* skip B-frames if we don't have reference frames */
    if(s->last_picture_ptr==NULL && (s->pict_type==FF_B_TYPE || s->dropable)) return get_consumed_bytes(s, buf_size);
    /* skip B-frames if we are in a hurry */
    if(avctx->hurry_up && s->pict_type==FF_B_TYPE) return get_consumed_bytes(s, buf_size);
    if(   (avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type==FF_B_TYPE)
       || (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type!=FF_I_TYPE)
       ||  avctx->skip_frame >= AVDISCARD_ALL)
        return get_consumed_bytes(s, buf_size);
    /* skip everything if we are in a hurry>=5 */
    if(avctx->hurry_up>=5) return get_consumed_bytes(s, buf_size);

    if(s->next_p_frame_damaged){
        if(s->pict_type==FF_B_TYPE)
            return get_consumed_bytes(s, buf_size);
        else
            s->next_p_frame_damaged=0;
    }

    if((s->avctx->flags2 & CODEC_FLAG2_FAST) && s->pict_type==FF_B_TYPE){
        s->me.qpel_put= s->dsp.put_2tap_qpel_pixels_tab;
        s->me.qpel_avg= s->dsp.avg_2tap_qpel_pixels_tab;
    }else if((!s->no_rounding) || s->pict_type==FF_B_TYPE){
        s->me.qpel_put= s->dsp.put_qpel_pixels_tab;
        s->me.qpel_avg= s->dsp.avg_qpel_pixels_tab;
    }else{
        s->me.qpel_put= s->dsp.put_no_rnd_qpel_pixels_tab;
        s->me.qpel_avg= s->dsp.avg_qpel_pixels_tab;
    }

    if(MPV_frame_start(s, avctx) < 0)
        return -1;

    /* decode each macroblock */
    s->mb_x=0;
    s->mb_y=0;

    decode_slice(s);
    while(s->mb_y<s->mb_height){
            if(ff_h263_resync(s)<0)
                break;

        decode_slice(s);
    }

intrax8_decoded:

    MPV_frame_end(s);

    assert(s->current_picture.pict_type == s->current_picture_ptr->pict_type);
    assert(s->current_picture.pict_type == s->pict_type);
    if (s->pict_type == FF_B_TYPE || s->low_delay) {
        *pict= *(AVFrame*)s->current_picture_ptr;
    } else if (s->last_picture_ptr != NULL) {
        *pict= *(AVFrame*)s->last_picture_ptr;
    }

    if(s->last_picture_ptr || s->low_delay){
        *data_size = sizeof(AVFrame);
        ff_print_debug_info(s, pict);
    }

    /* Return the Picture timestamp as the frame number */
    /* we subtract 1 because it is added in utils.c     */
    avctx->frame_number = s->picture_number - 1;

#ifdef PRINT_FRAME_TIME
av_log(avctx, AV_LOG_DEBUG, "%"PRId64"\n", rdtsc()-time);
#endif

    return get_consumed_bytes(s, buf_size);
}