Beispiel #1
0
/*
 * Pump the global libmpeg2 `decoder` with data from the global `source`
 * stream until one frame is ready, then hand it to present_frame().
 *
 * Returns 0 when a frame was delivered, -1 on end of input,
 * -2 when the stream is invalid.
 */
int decode_frame() {
  uint8_t buffer[4096];
  mpeg2_state_t state;
  size_t size;
  const mpeg2_info_t *info = mpeg2_info(decoder);

  for (;;) {
    state = mpeg2_parse(decoder);
    switch (state) {
    case STATE_BUFFER:
      size = fread(buffer, 1, sizeof buffer, source);
      /* size_t is unsigned, so the original `size <= 0` could only ever
       * match 0; make the end-of-input test explicit. */
      if (size == 0) {
        return -1;
      }
      /* Feed only the bytes actually read: the original passed
       * buffer + 4096, handing stale garbage to the decoder whenever
       * fread() returned a short count. */
      mpeg2_buffer(decoder, buffer, buffer + size);
      break;
    case STATE_SEQUENCE:
      mpeg2_convert(decoder, mpeg2convert_rgb32, NULL);
      break;
    case STATE_SLICE:
    case STATE_END:
    case STATE_INVALID_END:
      /* display_fbuf can be NULL on END states with nothing to show. */
      if (info->display_fbuf) {
        present_frame(
            info->sequence->width,
            info->sequence->height,
            info->display_fbuf->buf[0]);
      }
      return 0;
    case STATE_INVALID:
      return -2;
    default:
      break;
    }
  }
}
Beispiel #2
0
/*
 * Prepare `Mpeg_Struct` for decoding `mpgfile`: reset the file offset and
 * getopt parser globals, create the libmpeg2 decoder instance and, when
 * Mpeg_Buffering is enabled, prefetch the file into a fixed RAM window.
 *
 * Returns 1 on success, 0 when the decoder could not be allocated.
 */
int mpeg_instance_init(mpeg_struct_t *Mpeg_Struct, FileInfo_t *mpgfile)
{
#ifndef AVR32
    /* Schedule the frame-display timer at 1000/24 ms, i.e. ~24 fps. */
    timer_interval(&Mpeg_Struct->FrameDisplay, 1000/24);
#endif
	mpgfile->Offset = 0;
	/* Reset getopt's global state so a later option parse starts fresh.
	 * NOTE(review): presumably another module re-parses options per
	 * file -- confirm this reset is still required. */
	optind = 1;
	opterr = 0;
	optopt = '?';
	Mpeg_Struct->framenum = 0;

	Mpeg_Struct->decoder = mpeg2_init ();
	if (Mpeg_Struct->decoder == NULL) {
	return 0;
	}
	Mpeg_Struct->info = mpeg2_info (Mpeg_Struct->decoder);
	/* (unsigned int)-1 acts as the "nothing consumed yet" sentinel. */
	Mpeg_Struct->size = (unsigned int)-1;
	Mpeg_Struct->temporal_reference =(unsigned int)-1;
#ifdef Mpeg_Buffering
    /* Print the RTC before and after to time the bulk prefetch below. */
    RtcTimeCalDisplay();
	/* NOTE(review): reads up to 32 MiB to a fixed physical address
	 * (0x80000000 + 0x2000000) -- platform-specific, confirm mapping. */
	_FatData_ReadSection(mpgfile, (unsigned char*)0x80000000 + 0x2000000, 0, 0x2000000);
    RtcTimeCalDisplay();
#endif
	return 1;
}
Beispiel #3
0
/*
 * Create an Mpeg2 playback context for `filename`.  When stream_from_disk
 * is non-zero the file is streamed via stdio; otherwise it is loaded into
 * memory in one piece with glueLoadfile().
 *
 * Returns the initialized context.  Fatal problems are routed through
 * glueError(), which is assumed not to return (the original code relied
 * on that too) -- TODO confirm.
 */
Mpeg2 *glueMpeg2_open(char *filename, int stream_from_disk) {
  Mpeg2 *mpg = malloc(sizeof(Mpeg2));

  /* The original memset()'d an unchecked malloc() result. */
  if (mpg == NULL) {
    glueError("Could not allocate the Mpeg2 context");
  }
  memset(mpg, 0, sizeof(Mpeg2));

  mpg->decoder = mpeg2_init();
  if (mpg->decoder == NULL) {
	  glueError("Could not allocate a decoder object (libmpeg2)");
  }

  mpg->info = mpeg2_info(mpg->decoder);

  if (stream_from_disk) {
    mpg->f = fopen(filename, "rb");
    /* A missing file previously produced a NULL FILE* that would crash
     * on first read. */
    if (mpg->f == NULL) {
      glueError("Could not open MPEG2 input file");
    }
    mpg->stream_from_disk=1;
  } else {
    mpg->data = glueLoadfile(filename);
    mpg->stream_from_disk=0;
  }
  mpg->whereami = 0;
  mpg->framenum = 0;
  mpg->prevtime = 0;
  mpg->end = 0;

  glueLoading();

  return mpg;
}
Beispiel #4
0
static void sample1 (FILE * file)
{
#define BUFFER_SIZE 4096
    uint8_t buffer[BUFFER_SIZE];
    mpeg2dec_t * mpeg2dec;
    const mpeg2_info_t * info;
    int state;
    int size;
    int framenum = 0;

    mpeg2dec = mpeg2_init ();
    if (mpeg2dec == NULL)
	exit (1);
    info = mpeg2_info (mpeg2dec);

    size = BUFFER_SIZE;
    do {
	state = mpeg2_parse (mpeg2dec);
	switch (state) {
	case -1:
	    size = fread (buffer, 1, BUFFER_SIZE, file);
	    mpeg2_buffer (mpeg2dec, buffer, buffer + size);
	    break;
	case STATE_SLICE:
	case STATE_END:
	    if (info->display_fbuf)
		save_pgm (info->sequence->width, info->sequence->height,
			  info->display_fbuf->buf, framenum++);
	    break;
	}
    } while (size);

    mpeg2_close (mpeg2dec);
}
// -----------------------------------------------------------------------------
// Initialize libmpeg2 and create a decoder object
// -----------------------------------------------------------------------------
// Create the libmpeg2 decoder, allocate the raw-data buffer and fetch the
// info structure; finishes by decoding the first frame to prime the
// sequence header.  Returns the resulting isDecoder state.
bool TTMpeg2Decoder::initDecoder()
{

  // TODO: check for subsequent calling initDecoder

  mpeg2Decoder = mpeg2_init();

  if ( mpeg2Decoder == NULL )
  {
    fprintf (stderr, "%sCould not allocate a decoder object.\n",c_name);
    isDecoder = false;
    return isDecoder;
  }

  isDecoder = true;

  // initialize the decoder buffer
  decoderBuffer = new uint8_t [initialDecoderBufferSize];
  decoderBufferSize = initialDecoderBufferSize;

  mpeg2Info = mpeg2_info(mpeg2Decoder);

  // NOTE(review): on this failure path the freshly allocated decoderBuffer
  // and the libmpeg2 instance stay alive as members; presumably the
  // destructor releases them -- confirm there is no leak on failure.
  if ( !ttAssigned(mpeg2Info) )
  {
    isDecoder = false;
  }

  // read the first sequence header
  if ( isDecoder )
  {
     decodeFirstMPEG2Frame( formatRGB24 );
  }

  return isDecoder;
}
// Translate the libmpeg2 flags of the currently displayable picture (and
// its sequence) into the decoder's FRAME_FLAG_* bit set.
// Returns false when no decoder exists or no picture/sequence header has
// been decoded yet.
bool CMpeg2Decoder::GetFrameFlags(uint32_t *pFlags) const
{
	if (!m_pDec)
		return false;

	const mpeg2_info_t *pInfo = mpeg2_info(m_pDec);

	// mpeg2_info() leaves these pointers NULL until a picture / sequence
	// header has been parsed; the original dereferenced them blindly.
	if (!pInfo->display_picture || !pInfo->sequence)
		return false;

	const uint32_t PicFlags = pInfo->display_picture->flags;
	uint32_t Flags = 0;

	if (PicFlags & PIC_FLAG_TOP_FIELD_FIRST)
		Flags |= FRAME_FLAG_TOP_FIELD_FIRST;
	if (PicFlags & PIC_FLAG_REPEAT_FIRST_FIELD)
		Flags |= FRAME_FLAG_REPEAT_FIRST_FIELD;
	if (PicFlags & PIC_FLAG_PROGRESSIVE_FRAME)
		Flags |= FRAME_FLAG_PROGRESSIVE_FRAME;
	if (pInfo->sequence->flags & SEQ_FLAG_PROGRESSIVE_SEQUENCE)
		Flags |= FRAME_FLAG_PROGRESSIVE_SEQUENCE;
	switch (PicFlags & PIC_MASK_CODING_TYPE) {
	case PIC_FLAG_CODING_TYPE_I: Flags |= FRAME_FLAG_I_FRAME; break;
	case PIC_FLAG_CODING_TYPE_P: Flags |= FRAME_FLAG_P_FRAME; break;
	case PIC_FLAG_CODING_TYPE_B: Flags |= FRAME_FLAG_B_FRAME; break;
	}

	*pFlags = Flags;

	return true;
}
// Create the libmpeg2 decoder instance and reset all per-stream state so
// decoding starts fresh at the next I-frame.
void TvideoCodecLibmpeg2::init(void)
{
    mpeg2dec = mpeg2_init();
    info = mpeg2_info(mpeg2dec);
    m_fFilm = false;
    sequenceFlag = FIELD_TYPE::SEQ_START;
    wait4Iframe = true;
}
/*
 * Decode the MPEG-2 elementary stream described by `decode` and hand every
 * displayable frame to a writer callback (YUV420P by default, RGB24 when
 * the caller asked for TC_CODEC_RGB).  Exits the process when done.
 */
void decode_mpeg2(decode_t *decode)
{
    mpeg2dec_t *mdec;
    const mpeg2_info_t *minfo;
    const mpeg2_sequence_t *mseq;
    mpeg2_state_t mstate;
    size_t nread;
    uint32_t accel_flags;
    WriteDataFn writer;

    /* Pick the output writer from the requested format. */
    writer = write_yuv420p;
    if (decode->format == TC_CODEC_RGB) {
        tc_log_info(__FILE__, "using libmpeg2convert"
                              " RGB24 conversion");
        writer = write_rgb24;
    }

    /* Probe and report CPU acceleration. */
    accel_flags = mpeg2_accel(MPEG2_ACCEL_DETECT);
    show_accel(accel_flags);

    mdec = mpeg2_init();
    if (mdec == NULL) {
        tc_log_error(__FILE__, "Could not allocate a decoder object.");
        import_exit(1);
    }
    minfo = mpeg2_info(mdec);

    nread = (size_t)-1;
    for (;;) {
        mstate = mpeg2_parse(mdec);
        mseq = minfo->sequence;
        if (mstate == STATE_BUFFER) {
            nread = tc_pread(decode->fd_in, buffer, BUFFER_SIZE);
            mpeg2_buffer(mdec, buffer, buffer + nread);
        } else if (mstate == STATE_SEQUENCE) {
            if (decode->format == TC_CODEC_RGB) {
                mpeg2_convert(mdec, mpeg2convert_rgb24, NULL);
            }
        } else if (mstate == STATE_SLICE || mstate == STATE_END
                || mstate == STATE_INVALID_END) {
            if (minfo->display_fbuf) {
                writer(decode, minfo, mseq);
            }
        }
        /* Loop ends once a read came back empty, as in the original
         * do/while (size). */
        if (!nread)
            break;
    }

    mpeg2_close(mdec);
    import_exit(0);
}
/*------------------------------------------------------------------*/
uint8_t 	decoderMpeg::uncompress(uint8_t *in,uint8_t *out,uint32_t len,uint32_t *flag)
{
		if(flag) *flag=0;
#if defined( REMOVE_PADDING)
		while(*(in+len-1)==0) len--;
#endif		
		if(!len) return 1;
		feedData(len,in);

	const mpeg2_info_t  *info ;
	uint8_t *t;

		t=(uint8_t *) MPEG2DEC->fbuf[0]->buf[0];
		mpeg2_cleanup(MPEG2DEC);
		info= mpeg2_info (MPEG2DEC);				
#ifndef ADM_BIG_ENDIAN_ZZ
		if(_postproc.postProcType && _postproc.postProcStrength)
			{ 	// we do postproc !
				// keep

		 		oBuff[0]=out; 
		 		oBuff[1]=out+_w*_h;
 		 		oBuff[2]=out+((_w*_h*5)>>2);
				
				iBuff[0]=t;
		 		iBuff[1]=t+_w*_h;
 		 		iBuff[2]=t+((_w*_h*5)>>2);
        			
			        strideTab[0]=strideTab2[0]=_w;
				strideTab[1]=strideTab2[1]=_w>>1;
				strideTab[2]=strideTab2[2]=_w>>1;
/*
void  pp_postprocess(uint8_t * src[3], int srcStride[3],
                 uint8_t * dst[3], int dstStride[3],
                 int horizontalSize, int verticalSize,
                 QP_STORE_T *QP_store,  int QP_stride,
		 pp_mode_t *mode, 
		 pp_context_t *ppContext, 
		 int pict_type);

*/            			
		 		   pp_postprocess(
		      			iBuff,
		        		strideTab,
		          		oBuff,
		         		strideTab2,
		      			_w,
		        		_h,
		          		MPEG2DEC->decoder.quant,
		          		MPEG2DEC->decoder.quant_stride,
		         		_postproc.ppMode,
		          		_postproc.ppContext,
		          		MPEG2DEC->decoder.coding_type);	
					printf("Postprocessed\n");		  	
			}
Beispiel #10
0
// Construct the MPEG decoder codec: remember the current screen format,
// bring up libmpeg2 and cache its info structure.  Aborts via error()
// when libmpeg2 cannot be initialized.
MPEGDecoder::MPEGDecoder() : Codec() {
	_surface = 0;
	_pixelFormat = g_system->getScreenFormat();

	_mpegDecoder = mpeg2_init();
	if (!_mpegDecoder)
		error("Could not initialize libmpeg2");

	_mpegInfo = mpeg2_info(_mpegDecoder);
}
Beispiel #11
0
/* GstVideoDecoder "open" vmethod: probe CPU acceleration, allocate the
 * libmpeg2 decoder instance and cache its info structure. */
static gboolean
gst_mpeg2dec_open (GstVideoDecoder * decoder)
{
  GstMpeg2dec *self = GST_MPEG2DEC (decoder);

  mpeg2_accel (MPEG2_ACCEL_DETECT);
  self->decoder = mpeg2_init ();
  if (self->decoder == NULL)
    return FALSE;
  self->info = mpeg2_info (self->decoder);

  return TRUE;
}
// Report the coded picture dimensions of the current stream.
// Returns false when no decoder exists or no sequence header has been
// parsed yet.
bool CMpeg2Decoder::GetOutputSize(int *pWidth, int *pHeight) const
{
	if (!m_pDec)
		return false;

	const mpeg2_info_t *pInfo = mpeg2_info(m_pDec);

	// sequence stays NULL until the first sequence header; the original
	// dereferenced it unconditionally.
	if (!pInfo->sequence)
		return false;

	*pWidth = pInfo->sequence->picture_width;
	*pHeight = pInfo->sequence->picture_height;

	return true;
}
// Compute the display aspect ratio (picture size scaled by pixel aspect),
// reduced to lowest terms.  Returns false when no decoder, no sequence
// header, or no pixel-aspect information is available.
bool CMpeg2Decoder::GetAspectRatio(int *pAspectX, int *pAspectY) const
{
	if (m_pDec) {
		const mpeg2_info_t *pInfo = mpeg2_info(m_pDec);

		// sequence stays NULL until the first sequence header; guard it
		// before dereferencing (the original did not).
		if (pInfo->sequence
				&& pInfo->sequence->pixel_width && pInfo->sequence->pixel_height) {
			*pAspectX = pInfo->sequence->picture_width * pInfo->sequence->pixel_width;
			*pAspectY = pInfo->sequence->picture_height * pInfo->sequence->pixel_height;
			ReduceFraction(pAspectX, pAspectY);
			return true;
		}
	}

	return false;
}
Beispiel #14
0
/* Decode `mpgfile`, convert every frame to RGB24 and dump it as a PPM
 * image via save_ppm().  Stops when fread() returns 0. */
static void sample2 (FILE * mpgfile)
{
#define BUFFER_SIZE 4096
    uint8_t chunk[BUFFER_SIZE];
    mpeg2dec_t * dec;
    const mpeg2_info_t * inf;
    mpeg2_state_t st;
    size_t nread;
    int frame = 0;

    dec = mpeg2_init ();
    if (dec == NULL) {
	fprintf (stderr, "Could not allocate a decoder object.\n");
	exit (1);
    }
    inf = mpeg2_info (dec);

    nread = (size_t)-1;
    do {
	st = mpeg2_parse (dec);
	if (st == STATE_BUFFER) {
	    nread = fread (chunk, 1, BUFFER_SIZE, mpgfile);
	    mpeg2_buffer (dec, chunk, chunk + nread);
	} else if (st == STATE_SEQUENCE) {
	    /* Ask libmpeg2 to hand us RGB24 instead of planar YUV. */
	    mpeg2_convert (dec, mpeg2convert_rgb24, NULL);
	} else if (st == STATE_SLICE || st == STATE_END
		   || st == STATE_INVALID_END) {
	    if (inf->display_fbuf)
		save_ppm (inf->sequence->width, inf->sequence->height,
			  inf->display_fbuf->buf[0], frame++);
	}
    } while (nread);

    mpeg2_close (dec);
}
// Fill `pFrameBuffer` with the geometry, pitches and plane pointers of the
// frame libmpeg2 currently exposes for display (I420 layout).
// Returns false when no decoder exists or nothing is displayable yet.
bool CMpeg2Decoder::GetFrame(CFrameBuffer *pFrameBuffer) const
{
	if (!m_pDec)
		return false;

	const mpeg2_info_t *Info = mpeg2_info(m_pDec);
	if (!Info->display_fbuf)
		return false;

	const mpeg2_sequence_t *Seq = Info->sequence;
	pFrameBuffer->m_Width = Seq->picture_width;
	pFrameBuffer->m_Height = Seq->picture_height;
	pFrameBuffer->m_PitchY = Seq->width;
	pFrameBuffer->m_PitchC = Seq->chroma_width;
	for (int i = 0; i < 3; i++)
		pFrameBuffer->m_Buffer[i] = Info->display_fbuf->buf[i];
	pFrameBuffer->m_Subtype = MEDIASUBTYPE_I420;

	return true;
}
// Expose the raw libmpeg2 info structure, or nullptr when no decoder
// instance exists.
const mpeg2_info_t *CMpeg2Decoder::GetMpeg2Info() const
{
	return m_pDec ? mpeg2_info(m_pDec) : nullptr;
}
Beispiel #17
0
/*
 * Decode `mpgfile` using application-owned ("custom fbuf") frame buffers:
 * three 16-byte-aligned YUV buffer sets are allocated from the global
 * fbuf[] pool when the sequence header arrives, recycled via the fbuf
 * `used` flag on discard, and freed when the sequence ends.  Displayable
 * frames are written out through save_pgm().
 */
static void sample5 (FILE * mpgfile)
{
#define BUFFER_SIZE 4096
#define ALIGN_16(p) ((void *)(((uintptr_t)(p) + 15) & ~((uintptr_t)15)))
    uint8_t buffer[BUFFER_SIZE];
    mpeg2dec_t * decoder;
    const mpeg2_info_t * info;
    const mpeg2_sequence_t * sequence;
    mpeg2_state_t state;
    size_t size;
    int framenum = 0;
    int i, j;
    struct fbuf_s * current_fbuf;

    decoder = mpeg2_init ();
    if (decoder == NULL) {
	fprintf (stderr, "Could not allocate a decoder object.\n");
	exit (1);
    }
    info = mpeg2_info (decoder);

    /* Non-zero sentinel so the do/while keeps going before the first read. */
    size = (size_t)-1;
    do {
	state = mpeg2_parse (decoder);
	sequence = info->sequence;
	switch (state) {
	case STATE_BUFFER:
	    size = fread (buffer, 1, BUFFER_SIZE, mpgfile);
	    mpeg2_buffer (decoder, buffer, buffer + size);
	    break;
	case STATE_SEQUENCE:
	    /* Take over buffer management: allocate three YUV buffer sets,
	     * each over-allocated by 15 bytes so the planes can be aligned
	     * to 16 bytes, and give libmpeg2 the first two as the initial
	     * prediction buffers. */
	    mpeg2_custom_fbuf (decoder, 1);
	    for (i = 0; i < 3; i++) {
		fbuf[i].mbuf[0] = (uint8_t *) malloc (sequence->width *
						 sequence->height + 15);
		fbuf[i].mbuf[1] = (uint8_t *) malloc (sequence->chroma_width *
						 sequence->chroma_height + 15);
		fbuf[i].mbuf[2] = (uint8_t *) malloc (sequence->chroma_width *
						 sequence->chroma_height + 15);
		if (!fbuf[i].mbuf[0] || !fbuf[i].mbuf[1] || !fbuf[i].mbuf[2]) {
		    fprintf (stderr, "Could not allocate an output buffer.\n");
		    exit (1);
		}
		for (j = 0; j < 3; j++)
		    fbuf[i].yuv[j] = ALIGN_16 (fbuf[i].mbuf[j]);
		fbuf[i].used = 0;
	    }
	    for (i = 0; i < 2; i++) {
		current_fbuf = get_fbuf ();
		mpeg2_set_buf (decoder, current_fbuf->yuv, current_fbuf);
	    }
	    break;
	case STATE_PICTURE:
	    /* Every new picture needs the third (rotating) buffer. */
	    current_fbuf = get_fbuf ();
	    mpeg2_set_buf (decoder, current_fbuf->yuv, current_fbuf);
	    break;
	case STATE_SLICE:
	case STATE_END:
	case STATE_INVALID_END:
	    if (info->display_fbuf)
		save_pgm (sequence->width, sequence->height,
			  sequence->chroma_width, sequence->chroma_height,
			  info->display_fbuf->buf, framenum++);
	    /* A discarded fbuf goes back into the free pool. */
	    if (info->discard_fbuf)
                ((struct fbuf_s *)info->discard_fbuf->id)->used = 0;
	    /* END / INVALID_END terminate the sequence: release the pool. */
	    if (state != STATE_SLICE)
		for (i = 0; i < 3; i++)
		    for (j = 0; j < 3; j++)
			free (fbuf[i].mbuf[j]);
	    break;
	default:
	    break;
	}
    } while (size);

    mpeg2_close (decoder);
}
Beispiel #18
0
int  PrivateDecoderMPEG2::GetFrame(AVStream *stream,
                                   AVFrame *picture,
                                   int *got_picture_ptr,
                                   AVPacket *pkt)
{
    AVCodecContext *avctx = stream->codec;
    *got_picture_ptr = 0;
    const mpeg2_info_t *info = mpeg2_info(mpeg2dec);
    mpeg2_buffer(mpeg2dec, pkt->data, pkt->data + pkt->size);
    while (1)
    {
        switch (mpeg2_parse(mpeg2dec))
        {
            case STATE_SEQUENCE:
                // libmpeg2 needs three buffers to do its work.
                // We set up two prediction buffers here, from
                // the set of available video frames.
                mpeg2_custom_fbuf(mpeg2dec, 1);
                for (int i = 0; i < 2; i++)
                {
                    avctx->get_buffer(avctx, picture);
                    mpeg2_set_buf(mpeg2dec, picture->data, picture->opaque);
                }
                break;
            case STATE_PICTURE:
                // This sets up the third buffer for libmpeg2.
                // We use up one of the three buffers for each
                // frame shown. The frames get released once
                // they are drawn (outside this routine).
                avctx->get_buffer(avctx, picture);
                mpeg2_set_buf(mpeg2dec, picture->data, picture->opaque);
                break;
            case STATE_BUFFER:
                // We're finished with the buffer...
                if (partialFrames.size())
                {
                    AVFrame *frm = partialFrames.dequeue();
                    *got_picture_ptr = 1;
                    *picture = *frm;
                    delete frm;
#if 0
                    QString msg("");
                    AvFormatDecoder *nd = (AvFormatDecoder *)(avctx->opaque);
                    if (nd && nd->GetNVP() && nd->GetNVP()->getVideoOutput())
                        msg = nd->GetNVP()->getVideoOutput()->GetFrameStatus();

                    VERBOSE(VB_IMPORTANT, "ret frame: "<<picture->opaque
                            <<"           "<<msg);
#endif
                }
                return pkt->size;
            case STATE_INVALID:
                // This is the error state. The decoder must be
                // reset on an error.
                Reset();
                return -1;

            case STATE_SLICE:
            case STATE_END:
            case STATE_INVALID_END:
                if (info->display_fbuf)
                {
                    bool exists = false;
                    avframe_q::iterator it = partialFrames.begin();
                    for (; it != partialFrames.end(); ++it)
                        if ((*it)->opaque == info->display_fbuf->id)
                            exists = true;

                    if (!exists)
                    {
                        AVFrame *frm = new AVFrame();
                        frm->data[0] = info->display_fbuf->buf[0];
                        frm->data[1] = info->display_fbuf->buf[1];
                        frm->data[2] = info->display_fbuf->buf[2];
                        frm->data[3] = NULL;
                        frm->opaque  = info->display_fbuf->id;
                        frm->type    = FF_BUFFER_TYPE_USER;
                        frm->top_field_first =
                            !!(info->display_picture->flags &
                               PIC_FLAG_TOP_FIELD_FIRST);
                        frm->interlaced_frame =
                            !(info->display_picture->flags &
                              PIC_FLAG_PROGRESSIVE_FRAME);
                        frm->repeat_pict =
                            !!(info->display_picture->flags &
#if CONFIG_LIBMPEG2EXTERNAL
                               PIC_FLAG_REPEAT_FIRST_FIELD);
#else
                               PIC_FLAG_REPEAT_FIELD);
#endif
                        partialFrames.enqueue(frm);

                    }
                }
                if (info->discard_fbuf)
                {
                    bool exists = false;
                    avframe_q::iterator it = partialFrames.begin();
                    for (; it != partialFrames.end(); ++it)
                    {
                        if ((*it)->opaque == info->discard_fbuf->id)
                        {
                            exists = true;
                            (*it)->data[3] = (unsigned char*) 1;
                        }
                    }

                    if (!exists)
                    {
                        AVFrame frame;
                        frame.opaque = info->discard_fbuf->id;
                        frame.type   = FF_BUFFER_TYPE_USER;
                        avctx->release_buffer(avctx, &frame);
                    }
                }
                break;
            default:
                break;
        }
Beispiel #19
0
/*
 * (Re)initialize the global libmpeg2 decoder for `mpgfile` and run the
 * parser until the first sequence header appears.  On STATE_SEQUENCE the
 * output is switched to RGB24, three custom output buffers are allocated
 * into the global fbuf[] pool and two are registered with the decoder.
 * Stream dimensions are stored through `w`/`h` when non-NULL.
 *
 * Returns 1 on success, 0 if picture/slice/end states arrive before the
 * sequence header.
 */
static int _initmpeg2(FILE *mpgfile, int *w, int *h)
{
    static uint8_t buffer[BUFFER_SIZE];
    mpeg2_state_t state;
    struct fbuf_s *cur;
    size_t nread;
    int npixels;
    int i;

    global_mpegfile = mpgfile;

    /* Throw away any decoder left over from a previous run. */
    if (decoder != NULL) {
        mpeg2_close(decoder);
        decoder = NULL;
    }
    decoder = mpeg2_init();
    if (decoder == NULL) {
        fprintf(stderr, "Could not allocate a decoder object.\n");
        exit(1);
    }
    info = mpeg2_info(decoder);

    nread = (size_t)-1;
    do {
        state = mpeg2_parse(decoder);
        if (state == STATE_BUFFER) {
            nread = fread(buffer, 1, BUFFER_SIZE, global_mpegfile);
            mpeg2_buffer(decoder, buffer, buffer + nread);
        } else if (state == STATE_SEQUENCE) {
            mpeg2_convert(decoder, mpeg2convert_rgb24, NULL);
            mpeg2_custom_fbuf(decoder, 1);
            if (w != NULL)
                *w = info->sequence->width;
            if (h != NULL)
                *h = info->sequence->height;
            /* One packed RGB24 buffer per fbuf slot; planes 1/2 unused. */
            npixels = info->sequence->width * info->sequence->height;
            for (i = 0; i < 3; i++) {
                fbuf[i].rgb[0] = (uint8_t *) malloc(3 * npixels);
                fbuf[i].rgb[1] = fbuf[i].rgb[2] = NULL;
                if (!fbuf[i].rgb[0]) {
                    fprintf(stderr, "Could not allocate an output buffer.\n");
                    exit(1);
                }
                fbuf[i].used = 0;
            }
            for (i = 0; i < 2; i++) {
                cur = get_fbuf();
                mpeg2_set_buf(decoder, cur->rgb, cur);
            }
            /* Sequence header seen: initialization is done. */
            break;
        } else if (state == STATE_PICTURE || state == STATE_SLICE
                   || state == STATE_END || state == STATE_INVALID_END) {
            fprintf(stderr, "GOT unexpected state during initialization.\n");
            return 0;
        }
    } while (nread);

    return 1;
}
/*
 * Decode one compressed MPEG-1/2 chunk (`src`, `srcLen` bytes) delivered in
 * the DirectShow media sample `pIn`, and push every completed picture
 * downstream through sinkD.  Also handles closed-caption extraction from
 * GOP user data, playback-rate changes, preroll skipping and soft-telecine
 * bookkeeping.  Returns S_OK or the HRESULT of the downstream delivery.
 */
HRESULT TvideoCodecLibmpeg2::decompressI(const unsigned char *src,size_t srcLen,IMediaSample *pIn)
{
    TffdshowVideoInputPin::TrateAndFlush *rateInfo = (TffdshowVideoInputPin::TrateAndFlush*)deciV->getRateInfo();
    if (pIn->IsDiscontinuity() == S_OK) {
        rateInfo->isDiscontinuity = true;
    }
    // Sample timestamps; rtStop is only needed for the GetTime() call.
    REFERENCE_TIME rtStart=REFTIME_INVALID,rtStop=_I64_MIN;
    HRESULT hr_GetTime = pIn->GetTime(&rtStart,&rtStop);
    if (FAILED(hr_GetTime)) {
        rtStart=rtStop=REFTIME_INVALID;
    }

    // len > 0: chunk not yet fed; 0: fed; -1: terminate the parse loop.
    int len=(int)srcLen;
    while (len>=0) {
        mpeg2_state_t state=mpeg2_parse(mpeg2dec);
        switch (state) {
            case STATE_BUFFER: {
                // Parser ran dry: feed the chunk exactly once, then leave
                // the loop the next time more data is requested.
                if (len==0) {
                    len=-1;
                } else {
                    mpeg2_buffer(mpeg2dec,src,src+len);
                    len=0;
                }
                break;
            }
            case STATE_INVALID:
                break;
            case STATE_GOP: {
                // Extract closed captions from GOP user data, but only at
                // normal playback rate and with the expected CC signature.
                if(rateInfo->rate.Rate == 10000
                        && mpeg2dec->info.user_data_len > 4
                        && *(DWORD*)mpeg2dec->info.user_data == 0xf8014343) {
                    if (!ccDecoder) {
                        ccDecoder=new TccDecoder(deciV);
                    }
                    ccDecoder->decode(mpeg2dec->info.user_data+2,mpeg2dec->info.user_data_len-2);
                }
                break;
            }
            case STATE_SEQUENCE: {
                sequenceFlag=FIELD_TYPE::SEQ_START;

                // frame_period is in 27 MHz ticks; convert to 100 ns units.
                avgTimePerFrame=10LL*info->sequence->frame_period/27;
                deciV->setAverageTimePerFrame(&avgTimePerFrame,true);
                break;
            }
            case STATE_PICTURE: {
                // (Re)allocate the per-macroblock quantizer store sized for
                // this picture (16x16 macroblocks, 2 bytes per entry).
                mpeg2dec->decoder.quant_stride=quantsStride=quantsDx=(info->sequence->picture_width+15)>>4;
                quantsDy=(info->sequence->picture_height+15)>>4;
                quants=mpeg2dec->decoder.quant_store=(char*)realloc(quants, quantsStride*quantsDy*2);
                quantType=1;

                // Remove const cast
                mpeg2_picture_t* CurrentPicture = (mpeg2_picture_t*)mpeg2_info(mpeg2dec)->current_picture;

                // skip preroll pictures as well as non I frames during ff or rew
                if(pIn->IsPreroll()==S_OK || (rateInfo->rate.Rate < (10000 / TffdshowVideoInputPin::MAX_SPEED) && (CurrentPicture->flags&PIC_MASK_CODING_TYPE) != PIC_FLAG_CODING_TYPE_I)) {
                    // DPRINTF(_l("Skip preroll frame\n"));
                    rateInfo->isDiscontinuity = true;
                    CurrentPicture->flags |= PIC_FLAG_SKIP;
                }

                // Attach the sample start time to the picture being decoded.
                mpeg2_set_rtStart(mpeg2dec,rtStart);
                rtStart=REFTIME_INVALID;
                break;
            }
            case STATE_END:
                sequenceFlag |= FIELD_TYPE::SEQ_END;
                // fallthrough -- an END state still carries a final picture

            case STATE_SLICE:
                if (info->display_picture && info->discard_fbuf && !(info->display_picture->flags&PIC_FLAG_SKIP)) {
                    {
                        // Map the libmpeg2 coding type onto ffdshow's type.
                        int frametype;
                        if (info->sequence->flags&SEQ_FLAG_MPEG2) {
                            quantType=FF_QSCALE_TYPE_MPEG2;
                        }
                        switch (info->display_picture->flags&PIC_MASK_CODING_TYPE) {
                            case PIC_FLAG_CODING_TYPE_I:
                                frametype=FRAME_TYPE::I;
                                break;
                            case PIC_FLAG_CODING_TYPE_B:
                                frametype=FRAME_TYPE::B;
                                break;
                            default:
                            case PIC_FLAG_CODING_TYPE_P:
                                frametype=FRAME_TYPE::P;
                                break;
                        }
                        if (frametype==FRAME_TYPE::I) {
                            wait4Iframe=false;
                        }
                        if (pIn->IsPreroll()==S_OK) {
                            return sinkD->deliverPreroll(frametype);
                        }

                        int fieldtype = SetDeinterlaceMethod();

                        if (sequenceFlag != FIELD_TYPE::SEQ_START || frametype == FRAME_TYPE::I) {
                            fieldtype|=sequenceFlag;
                            sequenceFlag=0;
                        }

                        // Plane pointers and strides straight from libmpeg2.
                        unsigned char *data[4]= {info->display_fbuf->buf[0],info->display_fbuf->buf[1],info->display_fbuf->buf[2],NULL};
                        stride_t stride[4]= {info->sequence->width,info->sequence->chroma_width,info->sequence->chroma_width,0};
                        uint64_t csp;
                        // Derive the colorspace from the chroma/luma ratio.
                        switch ((info->sequence->chroma_width==info->sequence->width)+(info->sequence->chroma_height==info->sequence->height)) {
                            case 1:
                                csp=FF_CSP_422P;
                                break;
                            case 2:
                                csp=FF_CSP_444P;
                                break;
                            default:
                            case 0:
                                csp=FF_CSP_420P;
                                break;
                        }

                        Trect r(0,0,info->sequence->picture_width,info->sequence->picture_height);
                        r.sar = Rational(info->sequence->pixel_width,info->sequence->pixel_height);

                        // Correct impossible sar for DVD
                        if (info->sequence->flags & SEQ_FLAG_MPEG2) {
                            r.sar = guessMPEG2sar(r, Rational(info->sequence->pixel_width2,info->sequence->pixel_height2), containerSar);
                        }

                        TffPict pict(csp,data,stride,r,true,frametype,fieldtype,srcLen,NULL); //TODO: src frame size
                        pict.film = m_fFilm;

                        // I-frames carry their own timestamp; other frames
                        // continue from the previous picture's stop time.
                        if(frametype == FRAME_TYPE::I) {
                            pict.rtStart = info->display_picture->rtStart;
                        } else {
                            pict.rtStart = oldpict.rtStop;
                        }

                        // cope with a change in rate
                        if (rateInfo->rate.Rate != rateInfo->ratechange.Rate
                                && rateInfo->flushed
                                && frametype == FRAME_TYPE::I) {
                            // Buggy DVD navigator does not work as it is documented.
                            // DPRINTF(_l("rateInfo->ratechange.StartTime = %s rateInfo->rate.StartTime = %s rateInfo->rate.Rate %d"), Trt2str(rateInfo->ratechange.StartTime).c_str(),Trt2str(rateInfo->rate.StartTime).c_str(),rateInfo->rate.Rate);

                            rateInfo->rate.StartTime = pict.rtStart;
                            rateInfo->rate.Rate = rateInfo->ratechange.Rate;
                            rateInfo->isDiscontinuity = true;
                            // DPRINTF(_l("Got Rate StartTime = %s Rate = %d\n"), Trt2str(rateInfo->rate.StartTime).c_str(), rateInfo->rate.Rate);
                        }

                        if ((rateInfo->isDiscontinuity || rateInfo->correctTS) && frametype == FRAME_TYPE::I) {
                            // if we're at a Discontinuity use the times we're being sent in
                            // DPRINTF((ffstring(L"rateInfo->isDiscontinuity found. pict.rtStart ") + Trt2str(pict.rtStart) + L" rateInfo->rate.StartTime " + Trt2str(rateInfo->rate.StartTime)).c_str());
                            pict.rtStart = rateInfo->rate.StartTime + (pict.rtStart - rateInfo->rate.StartTime) * abs(rateInfo->rate.Rate) / 10000;

                            // DPRINTF(_l("rateInfo->isDiscontinuity found. updating rtStart %s oldpict.rtStop %s"),Trt2str(pict.rtStart).c_str(), Trt2str(oldpict.rtStop).c_str());
                            pict.discontinuity = rateInfo->isDiscontinuity;
                            rateInfo->isDiscontinuity = false;
                        } else {
                            pict.rtStart = oldpict.rtStop;
                        }

                        // A repeated first field stretches display time by
                        // half a frame (3 fields instead of 2).
                        unsigned int field_count = 2;
                        if (info->display_picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD) {
                            field_count++;
                        }

                        if (rateInfo->rate.Rate < (10000 / TffdshowVideoInputPin::MAX_SPEED)) {
                            pict.rtStop = pict.rtStart + avgTimePerFrame;
                        } else
                            pict.rtStop = pict.rtStart +
                                          (avgTimePerFrame * field_count * abs(rateInfo->rate.Rate) / (2 * 10000));
                        if (rateInfo->rate.Rate < (10000 / TffdshowVideoInputPin::MAX_SPEED)) {
                            pict.fieldtype |= FIELD_TYPE::SEQ_START | FIELD_TYPE::SEQ_END;
                        }

                        oldpict=pict;
                        if (rateInfo->isDiscontinuity) {
                            telecineManager.onSeek();
                        }

                        // soft telecine detection
                        // if "Detect soft telecine and average frame durations" is enabled,
                        // flames are flagged as progressive, frame durations are averaged.
                        // pict.film is valid even if the setting is disabled.
                        telecineManager.new_frame(
                            !!(info->display_picture->flags & PIC_FLAG_TOP_FIELD_FIRST),
                            !!(info->display_picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD),
                            pict.rtStart,
                            pict.rtStop);
                    }
                    if (!wait4Iframe) {
                        TffPict pict(oldpict);
                        telecineManager.get_fieldtype(pict);
                        telecineManager.get_timestamps(pict);
                        HRESULT hr = sinkD->deliverDecodedSample(pict);
                        if (hr != S_OK) {
                            return hr;
                        }
                    }
                    // else DPRINTF(_l("libmpeg2: waiting for keyframe"));
                }
                break;
        }
    }
    return S_OK;
}
Beispiel #21
0
/* Decode `mpgfile` into caller-allocated YUV buffers (registered with
 * mpeg2_set_buf) and dump every displayable frame with save_pgm(). */
static void sample3 (FILE * mpgfile)
{
#define BUFFER_SIZE 4096
    uint8_t chunk[BUFFER_SIZE];
    mpeg2dec_t * dec;
    const mpeg2_info_t * inf;
    const mpeg2_sequence_t * seq;
    mpeg2_state_t st;
    size_t nread;
    int frame = 0;
    uint8_t * planes[3][3];
    int i, j;

    dec = mpeg2_init ();
    if (dec == NULL) {
	fprintf (stderr, "Could not allocate a decoder object.\n");
	exit (1);
    }
    inf = mpeg2_info (dec);

    nread = (size_t)-1;
    do {
	st = mpeg2_parse (dec);
	seq = inf->sequence;
	if (st == STATE_BUFFER) {
	    nread = fread (chunk, 1, BUFFER_SIZE, mpgfile);
	    mpeg2_buffer (dec, chunk, chunk + nread);
	} else if (st == STATE_SEQUENCE) {
	    /* Hand three caller-owned YUV buffer sets to the decoder. */
	    for (i = 0; i < 3; i++) {
		planes[i][0] = (uint8_t *) malloc (seq->width * seq->height);
		planes[i][1] = (uint8_t *) malloc (seq->chroma_width *
						   seq->chroma_height);
		planes[i][2] = (uint8_t *) malloc (seq->chroma_width *
						   seq->chroma_height);
		if (!planes[i][0] || !planes[i][1] || !planes[i][2]) {
		    fprintf (stderr, "Could not allocate an output buffer.\n");
		    exit (1);
		}
		mpeg2_set_buf (dec, planes[i], NULL);
	    }
	} else if (st == STATE_SLICE || st == STATE_END
		   || st == STATE_INVALID_END) {
	    if (inf->display_fbuf)
		save_pgm (seq->width, seq->height,
			  seq->chroma_width, seq->chroma_height,
			  inf->display_fbuf->buf, frame++);
	    /* END / INVALID_END close the sequence: release the buffers. */
	    if (st != STATE_SLICE)
		for (i = 0; i < 3; i++)
		    for (j = 0; j < 3; j++)
			free (planes[i][j]);
	}
    } while (nread);

    mpeg2_close (dec);
}
Beispiel #22
0
/**
 * Prepare playback of the MPEG2 cutscene `name`: load the lookup palettes
 * (8-bit backend) or the RGB lookup table and overlay (true-colour
 * backend), open "<name>.mp2" and create the libmpeg2 decoder.
 *
 * @param name  base name of the cutscene files (no extension)
 * @return true on success; false when a required file or the decoder
 *         could not be set up (always false without USE_MPEG2)
 */
bool BaseAnimationState::init(const char *name) {
#ifdef USE_MPEG2
	char tempFile[512];

	_mpegDecoder = NULL;
	_mpegFile = NULL;

#ifdef BACKEND_8BIT

	uint i, p;

	// Load lookup palettes; use snprintf so a long cutscene name cannot
	// overflow tempFile (the original used unbounded sprintf).
	snprintf(tempFile, sizeof(tempFile), "%s.pal", name);

	Common::File f;

	if (!f.open(tempFile)) {
		warning("Cutscene: %s palette missing", tempFile);
		return false;
	}

	p = 0;
	while (!f.eos()) {
		_palettes[p].end = f.readUint16LE();
		_palettes[p].cnt = f.readUint16LE();

		// Read cnt RGB0 entries, then zero-fill the rest of the 256 slots.
		for (i = 0; i < _palettes[p].cnt; i++) {
			_palettes[p].pal[4 * i] = f.readByte();
			_palettes[p].pal[4 * i + 1] = f.readByte();
			_palettes[p].pal[4 * i + 2] = f.readByte();
			_palettes[p].pal[4 * i + 3] = 0;
		}
		for (; i < 256; i++) {
			_palettes[p].pal[4 * i] = 0;
			_palettes[p].pal[4 * i + 1] = 0;
			_palettes[p].pal[4 * i + 2] = 0;
			_palettes[p].pal[4 * i + 3] = 0;
		}

		// NOTE(review): p is not bounds-checked against the size of
		// _palettes[]; a corrupt .pal file could overrun it -- confirm
		// the array is sized for the worst case.
		p++;
	}

	f.close();

	_palNum = 0;
	_maxPalNum = p;
	setPalette(_palettes[_palNum].pal);
	_lut = _lut2 = _yuvLookup[0];
	_curPal = -1;
	_cr = 0;
	buildLookup(_palNum, 256);
	_lut2 = _yuvLookup[1];
	_lutCalcNum = (BITDEPTH + _palettes[_palNum].end + 2) / (_palettes[_palNum].end + 2);
#else
	buildLookup();
	_overlay = (OverlayColor *)calloc(_movieScale * _movieWidth * _movieScale * _movieHeight, sizeof(OverlayColor));
	_sys->showOverlay();
#endif

	// Open MPEG2 stream (bounded formatting, see above)
	_mpegFile = new Common::File();
	snprintf(tempFile, sizeof(tempFile), "%s.mp2", name);
	if (!_mpegFile->open(tempFile)) {
		warning("Cutscene: Could not open %s", tempFile);
		return false;
	}

	// Load and configure decoder
	_mpegDecoder = mpeg2_init();
	if (_mpegDecoder == NULL) {
		warning("Cutscene: Could not allocate an MPEG2 decoder");
		return false;
	}

	_mpegInfo = mpeg2_info(_mpegDecoder);
	_frameNum = 0;

	return true;
#else /* USE_MPEG2 */
	return false;
#endif
}
Beispiel #23
0
/*****************************************************************************
 * OpenDecoder: probe the decoder and return score
 *
 * VLC decoder-open callback: accepts only standard MPEG-1/2 video,
 * allocates and zero-initializes the decoder_sys_t state, selects CPU
 * acceleration flags for libmpeg2, creates the libmpeg2 decoder object
 * and installs DecodeBlock as the decode callback.
 * Returns VLC_SUCCESS, or VLC_EGENERIC / VLC_ENOMEM on failure.
 *****************************************************************************/
static int OpenDecoder( vlc_object_t *p_this )
{
    decoder_t *p_dec = (decoder_t*)p_this;
    decoder_sys_t *p_sys;
    uint32_t i_accel = 0;

    /* Refuse anything that is not MPEG video at the codec level. */
    if( p_dec->fmt_in.i_codec != VLC_CODEC_MPGV )
        return VLC_EGENERIC;

    /* Select only recognized original format (standard mpeg video) */
    switch( p_dec->fmt_in.i_original_fourcc )
    {
    case VLC_FOURCC('m','p','g','1'):
    case VLC_FOURCC('m','p','g','2'):
    case VLC_FOURCC('m','p','g','v'):
    case VLC_FOURCC('P','I','M','1'):
    case VLC_FOURCC('h','d','v','2'):
        break;
    default:
        /* An unknown non-zero original fourcc means a non-standard
         * variant we cannot handle; zero means "not specified" - accept. */
        if( p_dec->fmt_in.i_original_fourcc )
            return VLC_EGENERIC;
        break;
    }

    /* Allocate the memory needed to store the decoder's structure */
    if( ( p_dec->p_sys = p_sys = calloc( 1, sizeof(*p_sys)) ) == NULL )
        return VLC_ENOMEM;

    /* Initialize the thread properties */
    p_sys->p_mpeg2dec = NULL;
    p_sys->p_synchro  = NULL;
    p_sys->p_info     = NULL;
    p_sys->i_current_pts  = 0;
    p_sys->i_previous_pts = 0;
    p_sys->i_current_dts  = 0;
    p_sys->i_previous_dts = 0;
    p_sys->b_garbage_pic = false;
    p_sys->b_slice_i  = false;
    p_sys->b_second_field = false;
    p_sys->b_skip     = false;
    p_sys->b_preroll = false;
    DpbInit( p_dec );

    /* Closed-caption passthrough state (requires libmpeg2 >= 0.5.0). */
    p_sys->i_cc_pts = 0;
    p_sys->i_cc_dts = 0;
    p_sys->i_cc_flags = 0;
#if MPEG2_RELEASE >= MPEG2_VERSION (0, 5, 0)
    p_dec->pf_get_cc = GetCc;
    cc_Init( &p_sys->cc );
#endif
    p_sys->p_gop_user_data = NULL;
    p_sys->i_gop_user_data = 0;

    /* Translate VLC's runtime CPU detection into libmpeg2 accel flags. */
#if defined( __i386__ ) || defined( __x86_64__ )
    if( vlc_CPU() & CPU_CAPABILITY_MMX )
    {
        i_accel |= MPEG2_ACCEL_X86_MMX;
    }

    if( vlc_CPU() & CPU_CAPABILITY_3DNOW )
    {
        i_accel |= MPEG2_ACCEL_X86_3DNOW;
    }

    if( vlc_CPU() & CPU_CAPABILITY_MMXEXT )
    {
        i_accel |= MPEG2_ACCEL_X86_MMXEXT;
    }

#elif defined( __powerpc__ ) || defined( __ppc__ ) || defined( __ppc64__ )
    if( vlc_CPU() & CPU_CAPABILITY_ALTIVEC )
    {
        i_accel |= MPEG2_ACCEL_PPC_ALTIVEC;
    }

#else
    /* If we do not know this CPU, trust libmpeg2's feature detection */
    i_accel = MPEG2_ACCEL_DETECT;

#endif

    /* Set CPU acceleration features */
    mpeg2_accel( i_accel );

    /* Initialize decoder */
    p_sys->p_mpeg2dec = mpeg2_init();
    if( p_sys->p_mpeg2dec == NULL)
    {
        msg_Err( p_dec, "mpeg2_init() failed" );
        free( p_sys );
        return VLC_EGENERIC;
    }

    p_sys->p_info = mpeg2_info( p_sys->p_mpeg2dec );

    p_dec->pf_decode_video = DecodeBlock;
    p_dec->fmt_out.i_cat = VIDEO_ES;
    /* Output codec is decided later, once the sequence header is parsed. */
    p_dec->fmt_out.i_codec = 0;

    return VLC_SUCCESS;
}
Beispiel #24
0
/*
 * Decode one access unit of MPEG-1/2 video.  Feeds `buffer`/`buflen` to
 * libmpeg2 exactly once, drains all parser states it produces, configures
 * the video output on the first sequence header, and pushes every decoded
 * frame with a timestamp derived from the PTS->DTS converter.
 * Returns the number of bytes consumed (always buflen).
 */
static int mpeg2dec_decode (codec_data_t *ptr,
			    frame_timestamp_t *pts, 
			    int from_rtp,
			    int *sync_frame,
			    uint8_t *buffer, 
			    uint32_t buflen,
			    void *ud)
{
  mpeg2dec_codec_t *mpeg2dec = (mpeg2dec_codec_t *)ptr;
  mpeg2dec_t *decoder;
  const mpeg2_info_t *info;
  mpeg2_state_t state;
  uint64_t ts = pts->msec_timestamp;

  decoder = mpeg2dec->m_decoder;
  
#if 0
  /* NOTE(review): this disabled debug dump indexes up to buffer[ix + 5]
   * with ix < buflen + 3, i.e. out of bounds - fix before re-enabling. */
  mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec", "ts buflen %d "U64, buflen, ts);
  //if (mpeg2dec->m_did_pause != 0) 
 {
    for (uint32_t ix = 0; ix < buflen + 3; ix++) {
      if (buffer[ix] == 0 &&
	  buffer[ix + 1] == 0 &&
	  buffer[ix + 2] == 1) {
	mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec", "index %d - value %x %x %x", 
			      ix, buffer[ix + 3], buffer[ix + 4],
			      buffer[ix + 5]);
      }
    }
  }
#endif

 info = mpeg2_info(decoder);
 bool passed_buffer = false;
 bool finished_buffer = false;
 do {
   state = mpeg2_parse(decoder);
   //mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec", "state %d", state);
   const mpeg2_sequence_t *sequence;
   sequence = info->sequence;
   switch (state) {
   case STATE_BUFFER:
     /* Hand our buffer over once; the second starvation means it has
      * been fully consumed and we are done with this call. */
     if (passed_buffer == false) {
       mpeg2_buffer(decoder, buffer, buffer + buflen);
       passed_buffer = true;
     } else {
       finished_buffer = true;
     } 
     break;
   case STATE_SEQUENCE: {
     /* First sequence header: read the stream geometry and configure the
      * video output exactly once. */
     if (mpeg2dec->m_video_initialized == 0) {
       mpeg2dec->m_h = sequence->height;
       mpeg2dec->m_w = sequence->width;
       int have_mpeg2;
       uint32_t height;
       uint32_t width;
       /* BUG FIX: frame_rate was uninitialized; it is stored into
	* pts_convert below even when the parse call fails. */
       double frame_rate = 0.0;
       double bitrate;
       double aspect_ratio;
       uint8_t profile;
       if (MP4AV_Mpeg3ParseSeqHdr(buffer, 
				  buflen,
				  &have_mpeg2, 
				  &height, 
				  &width, 
				  &frame_rate,
				  &bitrate, 
				  &aspect_ratio,
				  &profile) < 0) {
	 
	 mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec", "pix w %u pix h %u", 
				  sequence->pixel_width, 
				  sequence->pixel_height);
	 aspect_ratio = sequence->pixel_width;
	 aspect_ratio *= mpeg2dec->m_w;
	 aspect_ratio /= (double)(sequence->pixel_height * mpeg2dec->m_h);
	 /* BUG FIX: derive the frame rate from libmpeg2's frame_period
	  * (period of a frame in 27 MHz clock ticks) on this fallback
	  * path instead of leaving it uninitialized. */
	 if (sequence->frame_period != 0)
	   frame_rate = 27000000.0 / (double)sequence->frame_period;
       }
       mpeg2dec->pts_convert.frame_rate = frame_rate;
       mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec", "%ux%u aspect %g", 
				mpeg2dec->m_w, mpeg2dec->m_h, 
				aspect_ratio);
       mpeg2dec->m_vft->video_configure(mpeg2dec->m_ifptr, 
					mpeg2dec->m_w,
					mpeg2dec->m_h,
					VIDEO_FORMAT_YUV,
					aspect_ratio);
       mpeg2dec->m_video_initialized = 1;
     }
     break;
    }
   case STATE_SLICE:
   case STATE_END:
   case STATE_INVALID_END:  
     // INVALID_END state means they found a new sequence header, with a 
     // new size
     
#ifdef DEBUG_MPEG2DEC_FRAME
     mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec", "frame "U64" decoded", 
			  mpeg2dec->cached_ts);
#endif
     if (info->display_fbuf) {
       /* Push the decoded planes up; use the incoming ts only when the
	* cached (converted) timestamp is not valid. */
       mpeg2dec->m_vft->video_have_frame(mpeg2dec->m_ifptr,
					 info->display_fbuf->buf[0],
					 info->display_fbuf->buf[1],
					 info->display_fbuf->buf[2],
					 sequence->width, 
					 sequence->chroma_width,
					 mpeg2dec->m_cached_ts_invalid ? ts :
					 mpeg2dec->cached_ts);
     }
     break;
   case STATE_SEQUENCE_REPEATED: // we don't care here
   case STATE_GOP: 
   case STATE_PICTURE:
   case STATE_SLICE_1ST:
   case STATE_PICTURE_2ND:
   case STATE_INVALID: //
   default:
     break;
   } 
 } while (finished_buffer == false);
 
 /* Convert the incoming PTS to a display timestamp for the *next* frame;
  * an inverted/failed conversion flags the cached ts as unusable. */
 mpeg2dec->m_cached_ts_invalid = false;
 if (pts->timestamp_is_pts) {
   if (info->current_picture == NULL ||
       mpeg3_find_dts_from_pts(&mpeg2dec->pts_convert,
			       ts,
			       info->current_picture->flags & PIC_MASK_CODING_TYPE, 
			       info->current_picture->temporal_reference,
			       &mpeg2dec->cached_ts) < 0) {
     mpeg2dec->m_cached_ts_invalid = true;
   }
#if 0
   mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec", "pts "U64" dts "U64" temp %u type %u", 
			    pts->msec_timestamp, mpeg2dec->cached_ts, 
			    info->current_picture->temporal_reference, 
			    info->current_picture->flags & PIC_MASK_CODING_TYPE);
#endif

 } else {
   mpeg2dec->cached_ts = ts;
 }
 return (buflen);
}