Example #1
static gboolean
gst_mpeg2dec_flush (GstVideoDecoder * decoder)
{
  GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder);

  /* reset the initial video state */
  mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE;
  mpeg2_reset (mpeg2dec->decoder, 1);
  mpeg2_skip (mpeg2dec->decoder, 1);

  gst_mpeg2dec_clear_buffers (mpeg2dec);

  return TRUE;
}
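Taken on its own, the flush path above is just two libmpeg2 calls. A minimal standalone sketch of the same pattern, assuming only a raw mpeg2dec_t handle (the flush_decoder name is illustrative, not from the original source):

#include <stdint.h>
#include <mpeg2.h>

/* Illustrative sketch, not original code: after a seek or discontinuity,
 * reset the decoder and put it into skip mode; the caller later re-enables
 * decoding with mpeg2_skip (dec, 0) once a keyframe shows up, as the
 * handle_picture() examples further down do. */
static void
flush_decoder (mpeg2dec_t * dec)
{
  mpeg2_reset (dec, 1);         /* full reset, as in gst_mpeg2dec_flush() */
  mpeg2_skip (dec, 1);          /* skip pictures until told otherwise */
}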
Example #2
static gboolean
gst_mpeg2dec_stop (GstVideoDecoder * decoder)
{
  GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder);

  mpeg2_reset (mpeg2dec->decoder, 0);
  mpeg2_skip (mpeg2dec->decoder, 1);

  gst_mpeg2dec_clear_buffers (mpeg2dec);

  if (mpeg2dec->input_state)
    gst_video_codec_state_unref (mpeg2dec->input_state);
  mpeg2dec->input_state = NULL;

  return TRUE;
}
Example #3
static gboolean
gst_mpeg2dec_stop (GstVideoDecoder * decoder)
{
  GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder);

  mpeg2_reset (mpeg2dec->decoder, 0);
  mpeg2_skip (mpeg2dec->decoder, 1);

  gst_mpeg2dec_clear_buffers (mpeg2dec);

  if (mpeg2dec->input_state)
    gst_video_codec_state_unref (mpeg2dec->input_state);
  mpeg2dec->input_state = NULL;

  if (mpeg2dec->downstream_pool) {
    gst_buffer_pool_set_active (mpeg2dec->downstream_pool, FALSE);
    gst_object_unref (mpeg2dec->downstream_pool);
  }

  return TRUE;
}
Example #4
File: libmpeg2.c  Project: Kafay/vlc
/*****************************************************************************
 * RunDecoder: the libmpeg2 decoder
 *****************************************************************************/
static picture_t *DecodeBlock( decoder_t *p_dec, block_t **pp_block )
{
    decoder_sys_t   *p_sys = p_dec->p_sys;
    mpeg2_state_t   state;
    picture_t       *p_pic;

    block_t *p_block;

    if( !pp_block || !*pp_block )
        return NULL;

    p_block = *pp_block;
    if( p_block->i_flags & (BLOCK_FLAG_DISCONTINUITY | BLOCK_FLAG_CORRUPTED) )
    {
        cc_Flush( &p_sys->cc );
        mpeg2_reset( p_sys->p_mpeg2dec, p_sys->p_info->sequence != NULL );
        DpbClean( p_dec );
    }

    while( 1 )
    {
        state = mpeg2_parse( p_sys->p_mpeg2dec );

        switch( state )
        {
        case STATE_SEQUENCE:
        {
            /* */
            DpbClean( p_dec );

            /* */
            mpeg2_custom_fbuf( p_sys->p_mpeg2dec, 1 );

            /* Set the first 2 reference frames */
            GetAR( p_dec );
            for( int i = 0; i < 2; i++ )
            {
                picture_t *p_picture = DpbNewPicture( p_dec );
                if( !p_picture )
                {
                    /* Is it ok ? or do we need a reset ? */
                    block_Release( p_block );
                    return NULL;
                }
                PutPicture( p_dec, p_picture );
            }

            if( p_sys->p_synchro )
                decoder_SynchroRelease( p_sys->p_synchro );

            p_sys->p_synchro =
                decoder_SynchroInit( p_dec, (uint32_t)((uint64_t)1001000000 * 27 /
                                     p_sys->p_info->sequence->frame_period) );
            p_sys->b_after_sequence_header = true;
            break;
        }

        case STATE_GOP:
            /* There can be userdata in a GOP. It needs to be remembered for the next picture. */
            if( p_sys->p_info->user_data_len > 2 )
            {
                free( p_sys->p_gop_user_data );
                p_sys->p_gop_user_data = calloc( p_sys->p_info->user_data_len, sizeof(uint8_t) );
                if( p_sys->p_gop_user_data )
                {
                    p_sys->i_gop_user_data = p_sys->p_info->user_data_len;
                    memcpy( p_sys->p_gop_user_data, p_sys->p_info->user_data, p_sys->p_info->user_data_len );
                }
            }
            break;

        case STATE_PICTURE:
        {
            const mpeg2_info_t *p_info = p_sys->p_info;
            const mpeg2_picture_t *p_current = p_info->current_picture;

            mtime_t i_pts, i_dts;

            if( p_sys->b_after_sequence_header &&
                (p_current->flags &
                    PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_P )
            {
                /* Intra-slice refresh. Simulate a blank I picture. */
                msg_Dbg( p_dec, "intra-slice refresh stream" );
                decoder_SynchroNewPicture( p_sys->p_synchro,
                                           I_CODING_TYPE, 2, 0, 0,
                                           p_info->sequence->flags & SEQ_FLAG_LOW_DELAY );
                decoder_SynchroDecode( p_sys->p_synchro );
                decoder_SynchroEnd( p_sys->p_synchro, I_CODING_TYPE, 0 );
                p_sys->b_slice_i = true;
            }
            p_sys->b_after_sequence_header = false;

#ifdef PIC_FLAG_PTS
            i_pts = p_current->flags & PIC_FLAG_PTS ?
                ( ( p_current->pts ==
                    (uint32_t)p_sys->i_current_pts ) ?
                  p_sys->i_current_pts : p_sys->i_previous_pts ) : 0;
            i_dts = 0;

            /* Hack to handle demuxers which only have DTS timestamps */
            if( !i_pts && !p_block->i_pts && p_block->i_dts > 0 )
            {
                if( p_info->sequence->flags & SEQ_FLAG_LOW_DELAY ||
                    (p_current->flags &
                      PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_B )
                {
                    i_pts = p_block->i_dts;
                }
            }
            p_block->i_pts = p_block->i_dts = 0;
            /* End hack */

#else /* New interface */

            i_pts = p_current->flags & PIC_FLAG_TAGS ?
                ( ( p_current->tag == (uint32_t)p_sys->i_current_pts ) ?
                            p_sys->i_current_pts : p_sys->i_previous_pts ) : 0;
            i_dts = p_current->flags & PIC_FLAG_TAGS ?
                ( ( p_current->tag2 == (uint32_t)p_sys->i_current_dts ) ?
                            p_sys->i_current_dts : p_sys->i_previous_dts ) : 0;
#endif

            /* If nb_fields == 1, it is a field picture, and it will be
             * followed by another field picture for which we won't call
             * decoder_SynchroNewPicture() because this would have other
             * problems, so we take it into account here.
             * This kind of sucks, but I didn't think better. --Meuuh
             */
            decoder_SynchroNewPicture( p_sys->p_synchro,
                                       p_current->flags & PIC_MASK_CODING_TYPE,
                                       p_current->nb_fields == 1 ? 2 :
                                       p_current->nb_fields, i_pts, i_dts,
                                       p_info->sequence->flags & SEQ_FLAG_LOW_DELAY );


            bool b_skip = false;
            if( !p_dec->b_pace_control && !p_sys->b_preroll &&
                !(p_sys->b_slice_i
                   && ((p_current->flags
                         & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_P))
                   && !decoder_SynchroChoose( p_sys->p_synchro,
                              p_current->flags
                                & PIC_MASK_CODING_TYPE,
                              /*p_sys->p_vout->render_time*/ 0 /*FIXME*/,
                              p_info->sequence->flags & SEQ_FLAG_LOW_DELAY ) )
            {
                b_skip = true;
            }

            p_pic = NULL;
            if( !b_skip )
                p_pic = DpbNewPicture( p_dec );

            if( b_skip || !p_pic )
            {
                mpeg2_skip( p_sys->p_mpeg2dec, 1 );
                p_sys->b_skip = true;
                decoder_SynchroTrash( p_sys->p_synchro );

                PutPicture( p_dec, NULL );

                if( !b_skip )
                {
                    block_Release( p_block );
                    return NULL;
                }
            }
            else
            {
                mpeg2_skip( p_sys->p_mpeg2dec, 0 );
                p_sys->b_skip = false;
                decoder_SynchroDecode( p_sys->p_synchro );

                PutPicture( p_dec, p_pic );
            }
            if( p_info->user_data_len > 2 || p_sys->i_gop_user_data > 2 )
            {
                p_sys->i_cc_pts = i_pts;
                p_sys->i_cc_dts = i_dts;
                if( (p_current->flags
                             & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_P )
                    p_sys->i_cc_flags = BLOCK_FLAG_TYPE_P;
                else if( (p_current->flags
                             & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_B )
                    p_sys->i_cc_flags = BLOCK_FLAG_TYPE_B;
                else p_sys->i_cc_flags = BLOCK_FLAG_TYPE_I;

                if( p_sys->i_gop_user_data > 2 )
                {
                    /* We now have picture info for any cached user_data out of the gop */
                    cc_Extract( &p_sys->cc, &p_sys->p_gop_user_data[0], p_sys->i_gop_user_data );
                    p_sys->i_gop_user_data = 0;
                }

                /* Extract the CC from the user_data of the picture */
                if( p_info->user_data_len > 2 )
                    cc_Extract( &p_sys->cc, &p_info->user_data[0], p_info->user_data_len );
            }
        }
        break;


        case STATE_BUFFER:
            if( !p_block->i_buffer )
            {
                block_Release( p_block );
                return NULL;
            }

            if( (p_block->i_flags & (BLOCK_FLAG_DISCONTINUITY
                                      | BLOCK_FLAG_CORRUPTED)) &&
                p_sys->p_synchro &&
                p_sys->p_info->sequence &&
                p_sys->p_info->sequence->width != (unsigned)-1 )
            {
                decoder_SynchroReset( p_sys->p_synchro );
                if( p_sys->p_info->current_fbuf != NULL &&
                    p_sys->p_info->current_fbuf->id != NULL )
                {
                    p_sys->b_garbage_pic = true;
                }
                if( p_sys->b_slice_i )
                {
                    decoder_SynchroNewPicture( p_sys->p_synchro,
                                               I_CODING_TYPE, 2, 0, 0,
                                               p_sys->p_info->sequence->flags &
                                                            SEQ_FLAG_LOW_DELAY );
                    decoder_SynchroDecode( p_sys->p_synchro );
                    decoder_SynchroEnd( p_sys->p_synchro, I_CODING_TYPE, 0 );
                }
            }

            if( p_block->i_flags & BLOCK_FLAG_PREROLL )
            {
                p_sys->b_preroll = true;
            }
            else if( p_sys->b_preroll )
            {
                p_sys->b_preroll = false;
                if( p_sys->p_synchro )
                    decoder_SynchroReset( p_sys->p_synchro );
            }

#ifdef PIC_FLAG_PTS
            if( p_block->i_pts )
            {
                mpeg2_pts( p_sys->p_mpeg2dec, (uint32_t)p_block->i_pts );

#else /* New interface */
            if( p_block->i_pts || p_block->i_dts )
            {
                mpeg2_tag_picture( p_sys->p_mpeg2dec,
                                   (uint32_t)p_block->i_pts,
                                   (uint32_t)p_block->i_dts );
#endif
                p_sys->i_previous_pts = p_sys->i_current_pts;
                p_sys->i_current_pts = p_block->i_pts;
                p_sys->i_previous_dts = p_sys->i_current_dts;
                p_sys->i_current_dts = p_block->i_dts;
            }

            mpeg2_buffer( p_sys->p_mpeg2dec, p_block->p_buffer,
                          p_block->p_buffer + p_block->i_buffer );

            p_block->i_buffer = 0;
            break;

#if MPEG2_RELEASE >= MPEG2_VERSION (0, 5, 0)

        case STATE_SEQUENCE_MODIFIED:
            GetAR( p_dec );
            break;
#endif
        case STATE_PICTURE_2ND:
            p_sys->b_second_field = true;
            break;


        case STATE_INVALID_END:
        case STATE_END:
        case STATE_SLICE:
            p_pic = NULL;
            if( p_sys->p_info->display_fbuf &&
                p_sys->p_info->display_fbuf->id )
            {
                p_pic = p_sys->p_info->display_fbuf->id;
                DpbDisplayPicture( p_dec, p_pic );

                decoder_SynchroEnd( p_sys->p_synchro,
                                    p_sys->p_info->display_picture->flags & PIC_MASK_CODING_TYPE,
                                    p_sys->b_garbage_pic );

                p_pic->date = decoder_SynchroDate( p_sys->p_synchro );
                if( p_sys->b_garbage_pic )
                    p_pic->date = 0; /* ??? */
                p_sys->b_garbage_pic = false;
            }

            if( p_sys->p_info->discard_fbuf &&
                p_sys->p_info->discard_fbuf->id )
            {
                DpbUnlinkPicture( p_dec, p_sys->p_info->discard_fbuf->id );
            }

            /* For still frames */
            if( state == STATE_END && p_pic )
                p_pic->b_force = true;

            if( p_pic )
            {
                /* Avoid frames with identical timestamps.
                 * Especially needed for still frames in DVD menus. */
                if( p_sys->i_last_frame_pts == p_pic->date )
                    p_pic->date++;
                p_sys->i_last_frame_pts = p_pic->date;
                return p_pic;
            }
            break;

        case STATE_INVALID:
        {
            msg_Err( p_dec, "invalid picture encountered" );
            /* I don't think we have anything to do, but well without
             * docs ... */
            break;
        }

        default:
            break;
        }
    }

    /* Never reached */
    return NULL;
}

/*****************************************************************************
 * CloseDecoder: libmpeg2 decoder destruction
 *****************************************************************************/
static void CloseDecoder( vlc_object_t *p_this )
{
    decoder_t *p_dec = (decoder_t *)p_this;
    decoder_sys_t *p_sys = p_dec->p_sys;

    DpbClean( p_dec );

    free( p_sys->p_gop_user_data );

    if( p_sys->p_synchro ) decoder_SynchroRelease( p_sys->p_synchro );

    if( p_sys->p_mpeg2dec ) mpeg2_close( p_sys->p_mpeg2dec );

    free( p_sys );
}
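DecodeBlock() above is a production version of the standard libmpeg2 parse loop. For orientation, here is a stripped-down sketch of that loop under simplifying assumptions (no PTS handling, no custom frame buffers, libmpeg2 allocating its own fbufs); on_frame is a hypothetical callback, not part of VLC:

#include <stddef.h>
#include <stdint.h>
#include <mpeg2.h>

/* Minimal sketch, not the VLC code: feed one chunk of elementary stream,
 * then drain decoder states until libmpeg2 asks for more data. */
static void
decode_chunk( mpeg2dec_t *dec, uint8_t *chunk, size_t chunk_size,
              void (*on_frame)( const mpeg2_sequence_t *,
                                const mpeg2_fbuf_t * ) )
{
    const mpeg2_info_t *info = mpeg2_info( dec );
    mpeg2_state_t state;

    mpeg2_buffer( dec, chunk, chunk + chunk_size );

    while( ( state = mpeg2_parse( dec ) ) != STATE_BUFFER )
    {
        switch( state )
        {
        case STATE_SLICE:
        case STATE_END:
        case STATE_INVALID_END:
            /* A picture is ready for display once display_fbuf is set. */
            if( info->display_fbuf )
                on_frame( info->sequence, info->display_fbuf );
            break;
        default:
            break; /* STATE_SEQUENCE, STATE_PICTURE, ... */
        }
    }
}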
Example #5
static GstFlowReturn
handle_slice (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoCodecFrame *frame;
  const mpeg2_picture_t *picture;
  gboolean key_frame = FALSE;
  GstVideoCodecState *state;

  GST_DEBUG_OBJECT (mpeg2dec,
      "fbuf:%p display_picture:%p current_picture:%p fbuf->id:%d",
      info->display_fbuf, info->display_picture, info->current_picture,
      GPOINTER_TO_INT (info->display_fbuf->id) - 1);

  /* Note, the fbuf-id is shifted by 1 to make the difference between
   * NULL values (used by dummy buffers) and 'real' values */
  frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (mpeg2dec),
      GPOINTER_TO_INT (info->display_fbuf->id) - 1);
  if (!frame)
    goto no_frame;
  picture = info->display_picture;
  key_frame = (picture->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_I;

  GST_DEBUG_OBJECT (mpeg2dec, "picture flags: %d, type: %d, keyframe: %d",
      picture->flags, picture->flags & PIC_MASK_CODING_TYPE, key_frame);

  if (key_frame) {
    mpeg2_skip (mpeg2dec->decoder, 0);
  }

  if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_KEYFRAME && key_frame)
    mpeg2dec->discont_state = MPEG2DEC_DISC_NONE;

  if (picture->flags & PIC_FLAG_SKIP) {
    GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer because of skip flag");
    ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
    mpeg2_skip (mpeg2dec->decoder, 1);
    return ret;
  }

  if (mpeg2dec->discont_state != MPEG2DEC_DISC_NONE) {
    GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer, discont state %d",
        mpeg2dec->discont_state);
    ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
    return ret;
  }

  state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (mpeg2dec));

  /* do cropping if the target region is smaller than the input one */
  if (mpeg2dec->need_cropping && !mpeg2dec->has_cropping) {
    GstVideoFrame *vframe;

    if (gst_video_decoder_get_max_decode_time (GST_VIDEO_DECODER (mpeg2dec),
            frame) < 0) {
      GST_DEBUG_OBJECT (mpeg2dec, "dropping buffer crop, too late");
      ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
      goto beach;
    }

    GST_DEBUG_OBJECT (mpeg2dec, "cropping buffer");
    vframe = gst_mpeg2dec_get_buffer (mpeg2dec, frame->system_frame_number);
    g_assert (vframe != NULL);
    ret = gst_mpeg2dec_crop_buffer (mpeg2dec, frame, vframe);
  }

  ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (mpeg2dec), frame);

beach:
  gst_video_codec_state_unref (state);
  return ret;

no_frame:
  {
    GST_DEBUG ("display buffer does not have a valid frame");
    return GST_FLOW_OK;
  }
}
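Both GStreamer examples rely on the fbuf id being shifted by one so that NULL stays reserved for dummy buffers. A small sketch of that convention in isolation (the helper names are illustrative, not from the original source):

#include <glib.h>

/* Illustrative helpers, not original code: the id handed to
 * mpeg2_set_buf() is system_frame_number + 1, so a NULL id never
 * collides with frame 0 and can keep marking dummy buffers. */
static gpointer
frame_number_to_fbuf_id (guint system_frame_number)
{
  return GINT_TO_POINTER (system_frame_number + 1);
}

static gint
fbuf_id_to_frame_number (gconstpointer fbuf_id)
{
  return GPOINTER_TO_INT (fbuf_id) - 1;
}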
Example #6
static GstFlowReturn
handle_picture (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info,
    GstVideoCodecFrame * frame)
{
  GstFlowReturn ret;
  gint type;
  const gchar *type_str = NULL;
  gboolean key_frame = FALSE;
  const mpeg2_picture_t *picture = info->current_picture;
  GstBuffer *buffer;

  ret = gst_mpeg2dec_alloc_buffer (mpeg2dec, frame, &buffer);
  if (ret != GST_FLOW_OK)
    return ret;

  type = picture->flags & PIC_MASK_CODING_TYPE;
  switch (type) {
    case PIC_FLAG_CODING_TYPE_I:
      key_frame = TRUE;
      mpeg2_skip (mpeg2dec->decoder, 0);
      type_str = "I";
      break;
    case PIC_FLAG_CODING_TYPE_P:
      type_str = "P";
      break;
    case PIC_FLAG_CODING_TYPE_B:
      type_str = "B";
      break;
    default:
      gst_video_codec_frame_ref (frame);
      ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
      GST_VIDEO_DECODER_ERROR (mpeg2dec, 1, STREAM, DECODE,
          ("decoding error"), ("Invalid picture type"), ret);
      return ret;
  }

  GST_DEBUG_OBJECT (mpeg2dec, "handle picture type %s", type_str);
  GST_DEBUG_OBJECT (mpeg2dec, "picture %s, frame %i",
      key_frame ? ", kf," : "    ", frame->system_frame_number);

  if (GST_VIDEO_INFO_IS_INTERLACED (&mpeg2dec->decoded_info)) {
    /* This implies SEQ_FLAG_PROGRESSIVE_SEQUENCE is not set */
    if (picture->flags & PIC_FLAG_TOP_FIELD_FIRST) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    }
    if (!(picture->flags & PIC_FLAG_PROGRESSIVE_FRAME)) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
    }
#if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0)
    /* repeat field introduced in 0.5.0 */
    if (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD) {
      GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_FLAG_RFF);
    }
#endif
  }

  if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_PICTURE && key_frame) {
    mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_KEYFRAME;
  }

  GST_DEBUG_OBJECT (mpeg2dec,
      "picture: %s %s %s %s %s fields:%d ts:%"
      GST_TIME_FORMAT,
      (picture->flags & PIC_FLAG_PROGRESSIVE_FRAME ? "prog" : "    "),
      (picture->flags & PIC_FLAG_TOP_FIELD_FIRST ? "tff" : "   "),
#if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0)
      (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD ? "rff" : "   "),
#else
      "unknown rff",
#endif
      (picture->flags & PIC_FLAG_SKIP ? "skip" : "    "),
      (picture->flags & PIC_FLAG_COMPOSITE_DISPLAY ? "composite" : "         "),
      picture->nb_fields, GST_TIME_ARGS (frame->pts));

  return ret;
}
Example #7
void CMpeg2Decoder::Skip(bool fSkip)
{
	if (m_pDec)
		mpeg2_skip(m_pDec, fSkip);
}
Example #8
static GstFlowReturn
handle_picture (GstMpeg2dec * mpeg2dec, const mpeg2_info_t * info,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoder *decoder = (GstVideoDecoder *) mpeg2dec;
  GstFlowReturn ret;
  gint type;
  const gchar *type_str = NULL;
  gboolean key_frame = FALSE;
  const mpeg2_picture_t *picture = info->current_picture;
  GstVideoFrame vframe;
  guint8 *buf[3];

  ret = gst_video_decoder_allocate_output_frame (decoder, frame);
  if (ret != GST_FLOW_OK)
    return ret;

  type = picture->flags & PIC_MASK_CODING_TYPE;
  switch (type) {
    case PIC_FLAG_CODING_TYPE_I:
      key_frame = TRUE;
      mpeg2_skip (mpeg2dec->decoder, 0);
      type_str = "I";
      break;
    case PIC_FLAG_CODING_TYPE_P:
      type_str = "P";
      break;
    case PIC_FLAG_CODING_TYPE_B:
      type_str = "B";
      break;
    default:
      gst_video_codec_frame_ref (frame);
      ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (mpeg2dec), frame);
      GST_VIDEO_DECODER_ERROR (mpeg2dec, 1, STREAM, DECODE,
          ("decoding error"), ("Invalid picture type"), ret);
      return ret;
  }

  GST_DEBUG_OBJECT (mpeg2dec, "handle picture type %s", type_str);
  GST_DEBUG_OBJECT (mpeg2dec, "picture %s, frame %i",
      key_frame ? ", kf," : "    ", frame->system_frame_number);

  if (GST_VIDEO_INFO_IS_INTERLACED (&mpeg2dec->decoded_info)) {
    /* This implies SEQ_FLAG_PROGRESSIVE_SEQUENCE is not set */
    if (picture->flags & PIC_FLAG_TOP_FIELD_FIRST) {
      GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
    }
    if (!(picture->flags & PIC_FLAG_PROGRESSIVE_FRAME)) {
      GST_BUFFER_FLAG_SET (frame->output_buffer,
          GST_VIDEO_BUFFER_FLAG_INTERLACED);
    }
#if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0)
    /* repeat field introduced in 0.5.0 */
    if (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD) {
      GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_RFF);
    }
#endif
  }

  if (mpeg2dec->discont_state == MPEG2DEC_DISC_NEW_PICTURE && key_frame) {
    mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_KEYFRAME;
  }

  GST_DEBUG_OBJECT (mpeg2dec,
      "picture: %s %s %s %s %s fields:%d ts:%"
      GST_TIME_FORMAT,
      (picture->flags & PIC_FLAG_PROGRESSIVE_FRAME ? "prog" : "    "),
      (picture->flags & PIC_FLAG_TOP_FIELD_FIRST ? "tff" : "   "),
#if MPEG2_RELEASE >= MPEG2_VERSION(0,5,0)
      (picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD ? "rff" : "   "),
#else
      "unknown rff",
#endif
      (picture->flags & PIC_FLAG_SKIP ? "skip" : "    "),
      (picture->flags & PIC_FLAG_COMPOSITE_DISPLAY ? "composite" : "         "),
      picture->nb_fields, GST_TIME_ARGS (frame->pts));

  if (!gst_video_frame_map (&vframe, &mpeg2dec->decoded_info,
          frame->output_buffer, GST_MAP_READ | GST_MAP_WRITE))
    goto map_fail;

  buf[0] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  buf[1] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 1);
  buf[2] = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 2);

  GST_DEBUG_OBJECT (mpeg2dec, "set_buf: %p %p %p, frame %i",
      buf[0], buf[1], buf[2], frame->system_frame_number);

  /* Note: We use a non-null 'id' value to make the distinction
   * between the dummy buffers (which have an id of NULL) and the
   * ones we did */
  mpeg2_stride (mpeg2dec->decoder, vframe.info.stride[0]);
  mpeg2_set_buf (mpeg2dec->decoder, buf,
      GINT_TO_POINTER (frame->system_frame_number + 1));
  gst_mpeg2dec_save_buffer (mpeg2dec, frame->system_frame_number, &vframe);

  return ret;

map_fail:
  {
    GST_ELEMENT_ERROR (mpeg2dec, RESOURCE, WRITE, ("Failed to map frame"),
        (NULL));
    return GST_FLOW_ERROR;
  }
}
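Example #8 combines the two halves of libmpeg2's custom frame-buffer protocol in one function. As a rough summary sketch (assumed helper names, not original code): custom buffers are switched on once when the sequence header is parsed, then three plane pointers plus a non-NULL id are attached for each picture.

#include <stdint.h>
#include <mpeg2.h>

/* Called once when mpeg2_parse() returns STATE_SEQUENCE, as Example #4
 * does: libmpeg2 stops allocating its own frame buffers. */
static void
enable_custom_fbuf (mpeg2dec_t * dec)
{
  mpeg2_custom_fbuf (dec, 1);
}

/* Called per picture, as Example #8 does after mapping the output buffer:
 * hand over the plane pointers and an id that identifies the frame when
 * it later appears as display_fbuf->id. */
static void
attach_picture_buffer (mpeg2dec_t * dec, uint8_t * planes[3], int stride,
    void *id)
{
  mpeg2_stride (dec, stride);   /* match the stride of the mapped planes */
  mpeg2_set_buf (dec, planes, id);
}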