static void sample1 (FILE * file) { #define BUFFER_SIZE 4096 uint8_t buffer[BUFFER_SIZE]; mpeg2dec_t * mpeg2dec; const mpeg2_info_t * info; int state; int size; int framenum = 0; mpeg2dec = mpeg2_init (); if (mpeg2dec == NULL) exit (1); info = mpeg2_info (mpeg2dec); size = BUFFER_SIZE; do { state = mpeg2_parse (mpeg2dec); switch (state) { case -1: size = fread (buffer, 1, BUFFER_SIZE, file); mpeg2_buffer (mpeg2dec, buffer, buffer + size); break; case STATE_SLICE: case STATE_END: if (info->display_fbuf) save_pgm (info->sequence->width, info->sequence->height, info->display_fbuf->buf, framenum++); break; } } while (size); mpeg2_close (mpeg2dec); }
/*
 * Cooperative-multitasking step of the MPEG player: decode until one frame
 * is ready, then (rate-limited unless AVR32) push it to the display callback
 * and keep a rough FPS counter on the debug UART.
 *
 * Returns the number of bytes last pulled from the input (0 signals that the
 * stream is exhausted).  Safe to call with Mpeg_Struct == NULL.
 */
unsigned int mpeg_idle(mpeg_struct_t *Mpeg_Struct, new_screen* ScreenBuff, FileInfo_t *mpgfile)
{
    if(!Mpeg_Struct) return 0;

    /* Phase 1: run the libmpeg2 state machine until a displayable frame
     * exists or the input runs dry. */
    if(!Mpeg_Struct->FrameReady) {
        do {
            Mpeg_Struct->state = mpeg2_parse(Mpeg_Struct->decoder);
            switch (Mpeg_Struct->state) {
            case STATE_BUFFER:
                /* Decoder wants more input; read_from_buffer is presumably
                 * fread-like (returns bytes delivered) — TODO confirm. */
                //size = fread (buffer, 1, _BUFFER_SIZE_, mpgfile);
                Mpeg_Struct->size = read_from_buffer(Mpeg_Struct->buffer, 1, _BUFFER_SIZE_, mpgfile);
                if(!Mpeg_Struct->size) break;
                mpeg2_buffer (Mpeg_Struct->decoder, Mpeg_Struct->buffer, Mpeg_Struct->buffer + Mpeg_Struct->size);
                break;
            case STATE_SEQUENCE:
                /* Install the configured colourspace converter once the
                 * sequence header is known. */
                mpeg2_convert (Mpeg_Struct->decoder, Mpeg_Struct->mpeg_convert, NULL);
                break;
            case STATE_SLICE:
            case STATE_END:
            case STATE_INVALID_END:
                if (Mpeg_Struct->info->display_fbuf)
                    Mpeg_Struct->FrameReady = 1;
                break;
            default:
                break;
            }
        } while(!Mpeg_Struct->FrameReady && Mpeg_Struct->size);
    }

    /* Phase 2: display the frame.  On non-AVR32 builds the frame-rate
     * limiter (timer_tick) gates presentation unless limiting is disabled. */
#ifndef AVR32
    if(Mpeg_Struct->FrameReady == true && (timer_tick(&Mpeg_Struct->FrameDisplay) == true || Mpeg_Struct->EnableFrameLimit == false))
#else
    if(Mpeg_Struct->FrameReady == true)
#endif
    {
        Mpeg_Struct->FrameReady = false;
        /*save_ppm (ScreenBuff, Mpeg_Struct->info->sequence->width, Mpeg_Struct->info->sequence->height, Mpeg_Struct->info->display_fbuf->buf[0], Mpeg_Struct->framenum++);*/
        _Fps++;
        /* Only push a frame whose temporal reference changed, i.e. skip
         * re-presenting the same picture. */
        if(Mpeg_Struct->info->current_picture->temporal_reference != Mpeg_Struct->temporal_reference)
        {
            Mpeg_Struct->temporal_reference = Mpeg_Struct->info->current_picture->temporal_reference;
            Mpeg_Struct->CallbackDisplayFrame((void*)Mpeg_Struct->CallbackDisplayFrameVariable,
                Mpeg_Struct->info->display_fbuf->buf[0], 0, 0,
                Mpeg_Struct->info->sequence->width, Mpeg_Struct->info->sequence->height);
        }
        /* Once per RTC second tick, report and reset the FPS counter. */
        if(CntToDetermineTheFps != rtcSecUpdate)
        {
            CntToDetermineTheFps = rtcSecUpdate;
            //UARTPuts(DebugCom, "Screen fill capability = ", -1);
            UARTPutNum(DebugCom, _Fps);
            UARTPuts(DebugCom, "Fps\n\r", -1);
            _Fps = 0;
        }
    }
    //if(Mpeg_Struct->size == 0) mpeg2_close (Mpeg_Struct->decoder);
    return Mpeg_Struct->size;
}
int decode_frame() { uint8_t buffer[4096]; mpeg2_state_t state; size_t size; const mpeg2_info_t *info = mpeg2_info(decoder); for (;;) { state = mpeg2_parse(decoder); switch (state) { case STATE_BUFFER: size = fread(buffer, 1, 4096, source); if (size <= 0) { return -1; } mpeg2_buffer(decoder, buffer, buffer + 4096); break; case STATE_SEQUENCE: mpeg2_convert(decoder, mpeg2convert_rgb32, NULL); break; case STATE_SLICE: case STATE_END: case STATE_INVALID_END: present_frame( info->sequence->width, info->sequence->height, info->display_fbuf->buf[0]); return 0; case STATE_INVALID: return -2; default: break; } } }
// Advance the wrapped libmpeg2 state machine by one step.
// Reports STATE_INVALID when no decoder instance exists.
mpeg2_state_t CMpeg2Decoder::Parse()
{
    return m_pDec ? mpeg2_parse(m_pDec) : STATE_INVALID;
}
/*
 * Decode an MPEG stream from decode->fd_in and emit each displayed frame
 * through a format-specific writer: YUV420P by default, RGB24 when the
 * caller requested TC_CODEC_RGB.  Never returns normally — control always
 * leaves through import_exit().
 */
void decode_mpeg2(decode_t *decode)
{
    const int want_rgb = (decode->format == TC_CODEC_RGB);
    WriteDataFn writer = want_rgb ? write_rgb24 : write_yuv420p;
    mpeg2dec_t *dec;
    const mpeg2_info_t *dec_info;
    const mpeg2_sequence_t *seq = NULL;
    mpeg2_state_t parse_state;
    size_t bytes = (size_t)-1;

    if (want_rgb) {
        tc_log_info(__FILE__, "using libmpeg2convert"
                              " RGB24 conversion");
    }

    /* Probe and report the available SIMD acceleration. */
    show_accel(mpeg2_accel(MPEG2_ACCEL_DETECT));

    dec = mpeg2_init();
    if (dec == NULL) {
        tc_log_error(__FILE__, "Could not allocate a decoder object.");
        import_exit(1);
    }
    dec_info = mpeg2_info(dec);

    do {
        parse_state = mpeg2_parse(dec);
        seq = dec_info->sequence;

        if (parse_state == STATE_BUFFER) {
            /* decoder wants more input */
            bytes = tc_pread(decode->fd_in, buffer, BUFFER_SIZE);
            mpeg2_buffer(dec, buffer, buffer + bytes);
        } else if (parse_state == STATE_SEQUENCE) {
            if (want_rgb) {
                mpeg2_convert(dec, mpeg2convert_rgb24, NULL);
            }
        } else if (parse_state == STATE_SLICE
                || parse_state == STATE_END
                || parse_state == STATE_INVALID_END) {
            if (dec_info->display_fbuf) {
                writer(decode, dec_info, seq);
            }
        }
    } while (bytes);

    mpeg2_close(dec);
    import_exit(0);
}
/**
 * Decode as much as possible out of one demuxed packet.
 *
 * @param packet       compressed data; read in BUFFER_SIZE chunks on demand
 * @param framePeriod  out: accumulated frame_period of every frame decoded
 *                     from this packet (reset to 0 on entry)
 * @param dst          target surface; when NULL, frames go to the lazily
 *                     created internal _surface
 * @return true if at least one displayable frame was produced
 */
bool MPEGDecoder::decodePacket(Common::SeekableReadStream *packet, uint32 &framePeriod, Graphics::Surface *dst)
{
    // Decode as much as we can out of this packet
    uint32 size = 0xFFFFFFFF;
    mpeg2_state_t state;
    bool foundFrame = false;
    framePeriod = 0;

    do {
        state = mpeg2_parse(_mpegDecoder);

        switch (state) {
        case STATE_BUFFER:
            // Decoder is starved: feed the next chunk of the packet.
            size = packet->read(_buffer, BUFFER_SIZE);
            mpeg2_buffer(_mpegDecoder, _buffer, _buffer + size);
            break;
        case STATE_SLICE:
        case STATE_END:
            if (_mpegInfo->display_fbuf) {
                foundFrame = true;
                const mpeg2_sequence_t *sequence = _mpegInfo->sequence;

                framePeriod += sequence->frame_period;

                if (!dst) {
                    // If no destination is specified, use our internal storage
                    if (!_surface) {
                        _surface = new Graphics::Surface();
                        _surface->create(sequence->picture_width, sequence->picture_height, _pixelFormat);
                    }
                    dst = _surface;
                }

                // Convert the decoder's planar YUV 4:2:0 output into RGB on
                // the destination surface.  'width' is the padded luma
                // stride; picture_* are the visible dimensions.
                YUVToRGBMan.convert420(dst, Graphics::YUVToRGBManager::kScaleITU,
                    _mpegInfo->display_fbuf->buf[0],
                    _mpegInfo->display_fbuf->buf[1],
                    _mpegInfo->display_fbuf->buf[2],
                    sequence->picture_width, sequence->picture_height,
                    sequence->width, sequence->chroma_width);
            }
            break;
        default:
            break;
        }
        // Loop until a read returned 0 bytes (packet exhausted).
    } while (size != 0);

    return foundFrame;
}
static void sample2 (FILE * mpgfile) { #define BUFFER_SIZE 4096 uint8_t buffer[BUFFER_SIZE]; mpeg2dec_t * decoder; const mpeg2_info_t * info; mpeg2_state_t state; size_t size; int framenum = 0; decoder = mpeg2_init (); if (decoder == NULL) { fprintf (stderr, "Could not allocate a decoder object.\n"); exit (1); } info = mpeg2_info (decoder); size = (size_t)-1; do { state = mpeg2_parse (decoder); switch (state) { case STATE_BUFFER: size = fread (buffer, 1, BUFFER_SIZE, mpgfile); mpeg2_buffer (decoder, buffer, buffer + size); break; case STATE_SEQUENCE: mpeg2_convert (decoder, mpeg2convert_rgb24, NULL); break; case STATE_SLICE: case STATE_END: case STATE_INVALID_END: if (info->display_fbuf) save_ppm (info->sequence->width, info->sequence->height, info->display_fbuf->buf[0], framenum++); break; default: break; } } while (size); mpeg2_close (decoder); }
/**
 * Decode and draw exactly one frame of the cutscene.
 *
 * Returns true as soon as one displayable frame has been drawn; returns
 * false when the stream is exhausted or MPEG2 support is compiled out.
 */
bool BaseAnimationState::decodeFrame() {
#ifdef USE_MPEG2
    mpeg2_state_t state;
    const mpeg2_sequence_t *sequence_i;
    size_t size = (size_t) -1;
    // Static so the (4 KB) read buffer does not live on the stack.
    static byte buf[BUFFER_SIZE];

    do {
        state = mpeg2_parse(_mpegDecoder);
        sequence_i = _mpegInfo->sequence;

        switch (state) {
        case STATE_BUFFER:
            // Decoder starved: feed the next chunk from the movie file.
            size = _mpegFile->read(buf, BUFFER_SIZE);
            mpeg2_buffer(_mpegDecoder, buf, buf + size);
            break;
        case STATE_SLICE:
        case STATE_END:
            if (_mpegInfo->display_fbuf) {
                // A complete picture is ready: render it and return.
                checkPaletteSwitch();
                drawYUV(sequence_i->width, sequence_i->height, _mpegInfo->display_fbuf->buf);
#ifdef BACKEND_8BIT
                buildLookup(_palNum + 1, _lutCalcNum);
#endif
                _frameNum++;
                return true;
            }
            break;
        default:
            break;
        }
        // size == 0 means the file is exhausted.
    } while (size);
#endif
    return false;
}
/*
 * libmpeg2 decode loop using application-supplied ("custom") frame buffers.
 *
 * Each fbuf entry owns three malloc'd planes (Y, U, V), over-allocated by
 * 15 bytes so the pointer handed to libmpeg2 can be rounded up to a
 * 16-byte boundary with ALIGN_16.  Decoded frames are written out as PGM
 * via save_pgm(); buffers are recycled through the global fbuf[]/get_fbuf().
 */
static void sample5 (FILE * mpgfile)
{
#define BUFFER_SIZE 4096
#define ALIGN_16(p) ((void *)(((uintptr_t)(p) + 15) & ~((uintptr_t)15)))
    uint8_t buffer[BUFFER_SIZE];
    mpeg2dec_t * decoder;
    const mpeg2_info_t * info;
    const mpeg2_sequence_t * sequence;
    mpeg2_state_t state;
    size_t size;
    int framenum = 0;
    int i, j;
    struct fbuf_s * current_fbuf;

    decoder = mpeg2_init ();
    if (decoder == NULL) {
        fprintf (stderr, "Could not allocate a decoder object.\n");
        exit (1);
    }
    info = mpeg2_info (decoder);

    size = (size_t)-1;
    do {
        state = mpeg2_parse (decoder);
        sequence = info->sequence;
        switch (state) {
        case STATE_BUFFER:
            /* refill the decoder's input */
            size = fread (buffer, 1, BUFFER_SIZE, mpgfile);
            mpeg2_buffer (decoder, buffer, buffer + size);
            break;
        case STATE_SEQUENCE:
            /* Switch libmpeg2 to caller-owned frame buffers, then allocate
             * three YUV buffer sets and register two of them as the initial
             * reference frames. */
            mpeg2_custom_fbuf (decoder, 1);
            for (i = 0; i < 3; i++) {
                /* +15 gives room to align the plane start to 16 bytes */
                fbuf[i].mbuf[0] = (uint8_t *) malloc (sequence->width * sequence->height + 15);
                fbuf[i].mbuf[1] = (uint8_t *) malloc (sequence->chroma_width * sequence->chroma_height + 15);
                fbuf[i].mbuf[2] = (uint8_t *) malloc (sequence->chroma_width * sequence->chroma_height + 15);
                if (!fbuf[i].mbuf[0] || !fbuf[i].mbuf[1] || !fbuf[i].mbuf[2]) {
                    fprintf (stderr, "Could not allocate an output buffer.\n");
                    exit (1);
                }
                for (j = 0; j < 3; j++)
                    fbuf[i].yuv[j] = ALIGN_16 (fbuf[i].mbuf[j]);
                fbuf[i].used = 0;
            }
            for (i = 0; i < 2; i++) {
                current_fbuf = get_fbuf ();
                mpeg2_set_buf (decoder, current_fbuf->yuv, current_fbuf);
            }
            break;
        case STATE_PICTURE:
            /* each new picture needs a destination buffer */
            current_fbuf = get_fbuf ();
            mpeg2_set_buf (decoder, current_fbuf->yuv, current_fbuf);
            break;
        case STATE_SLICE:
        case STATE_END:
        case STATE_INVALID_END:
            if (info->display_fbuf)
                save_pgm (sequence->width, sequence->height,
                          sequence->chroma_width, sequence->chroma_height,
                          info->display_fbuf->buf, framenum++);
            /* mark the buffer libmpeg2 no longer needs as reusable */
            if (info->discard_fbuf)
                ((struct fbuf_s *)info->discard_fbuf->id)->used = 0;
            /* at end-of-sequence states, release all plane allocations */
            if (state != STATE_SLICE)
                for (i = 0; i < 3; i++)
                    for (j = 0; j < 3; j++)
                        free (fbuf[i].mbuf[j]);
            break;
        default:
            break;
        }
    } while (size);

    mpeg2_close (decoder);
}
/**
 * Decode one compressed media sample through libmpeg2 and deliver any
 * completed pictures downstream via sinkD.
 *
 * @param src    compressed payload of the sample
 * @param srcLen payload length in bytes
 * @param pIn    the DirectShow sample (timestamps, preroll/discontinuity)
 * @return S_OK, or the failure code from deliverDecodedSample/deliverPreroll
 *
 * Feeds the whole payload to libmpeg2 exactly once (len: srcLen -> 0 -> -1
 * terminates the loop on the second STATE_BUFFER).
 */
HRESULT TvideoCodecLibmpeg2::decompressI(const unsigned char *src,size_t srcLen,IMediaSample *pIn)
{
    TffdshowVideoInputPin::TrateAndFlush *rateInfo = (TffdshowVideoInputPin::TrateAndFlush*)deciV->getRateInfo();
    if (pIn->IsDiscontinuity() == S_OK) {
        rateInfo->isDiscontinuity = true;
    }
    REFERENCE_TIME rtStart=REFTIME_INVALID,rtStop=_I64_MIN;
    HRESULT hr_GetTime = pIn->GetTime(&rtStart,&rtStop);
    if (FAILED(hr_GetTime)) {
        rtStart=rtStop=REFTIME_INVALID;
    }
    int len=(int)srcLen;
    while (len>=0) {
        mpeg2_state_t state=mpeg2_parse(mpeg2dec);
        switch (state) {
            case STATE_BUFFER: {
                // First starvation: hand over the sample; second: stop.
                if (len==0) {
                    len=-1;
                } else {
                    mpeg2_buffer(mpeg2dec,src,src+len);
                    len=0;
                }
                break;
            }
            case STATE_INVALID:
                break;
            case STATE_GOP: {
                // At normal play rate, GOP user data starting with the
                // 0x43 0x43 0x01 0xf8 signature carries closed captions.
                if(rateInfo->rate.Rate == 10000 && mpeg2dec->info.user_data_len > 4 && *(DWORD*)mpeg2dec->info.user_data == 0xf8014343) {
                    if (!ccDecoder) {
                        ccDecoder=new TccDecoder(deciV);
                    }
                    ccDecoder->decode(mpeg2dec->info.user_data+2,mpeg2dec->info.user_data_len-2);
                }
                break;
            }
            case STATE_SEQUENCE: {
                sequenceFlag=FIELD_TYPE::SEQ_START;
                // frame_period is in 27 MHz units; convert to 100 ns units.
                avgTimePerFrame=10LL*info->sequence->frame_period/27;
                deciV->setAverageTimePerFrame(&avgTimePerFrame,true);
                break;
            }
            case STATE_PICTURE: {
                // (Re)size the quantizer store to one byte per macroblock.
                mpeg2dec->decoder.quant_stride=quantsStride=quantsDx=(info->sequence->picture_width+15)>>4;
                quantsDy=(info->sequence->picture_height+15)>>4;
                quants=mpeg2dec->decoder.quant_store=(char*)realloc(quants, quantsStride*quantsDy*2);
                quantType=1;
                // Remove const cast
                mpeg2_picture_t* CurrentPicture = (mpeg2_picture_t*)mpeg2_info(mpeg2dec)->current_picture;
                // skip preroll pictures as well as non I frames during ff or rew
                if(pIn->IsPreroll()==S_OK || (rateInfo->rate.Rate < (10000 / TffdshowVideoInputPin::MAX_SPEED) && (CurrentPicture->flags&PIC_MASK_CODING_TYPE) != PIC_FLAG_CODING_TYPE_I)) {
                    // DPRINTF(_l("Skip preroll frame\n"));
                    rateInfo->isDiscontinuity = true;
                    CurrentPicture->flags |= PIC_FLAG_SKIP;
                }
                mpeg2_set_rtStart(mpeg2dec,rtStart);
                rtStart=REFTIME_INVALID;
                break;
            }
            case STATE_END:
                sequenceFlag |= FIELD_TYPE::SEQ_END;
                /* fall through */
            case STATE_SLICE:
                if (info->display_picture && info->discard_fbuf && !(info->display_picture->flags&PIC_FLAG_SKIP)) {
                    {
                        int frametype;
                        if (info->sequence->flags&SEQ_FLAG_MPEG2) {
                            quantType=FF_QSCALE_TYPE_MPEG2;
                        }
                        switch (info->display_picture->flags&PIC_MASK_CODING_TYPE) {
                            case PIC_FLAG_CODING_TYPE_I: frametype=FRAME_TYPE::I; break;
                            case PIC_FLAG_CODING_TYPE_B: frametype=FRAME_TYPE::B; break;
                            default:
                            case PIC_FLAG_CODING_TYPE_P: frametype=FRAME_TYPE::P; break;
                        }
                        if (frametype==FRAME_TYPE::I) {
                            wait4Iframe=false;
                        }
                        if (pIn->IsPreroll()==S_OK) {
                            return sinkD->deliverPreroll(frametype);
                        }
                        int fieldtype = SetDeinterlaceMethod();
                        if (sequenceFlag != FIELD_TYPE::SEQ_START || frametype == FRAME_TYPE::I) {
                            fieldtype|=sequenceFlag;
                            sequenceFlag=0;
                        }
                        unsigned char *data[4]= {info->display_fbuf->buf[0],info->display_fbuf->buf[1],info->display_fbuf->buf[2],NULL};
                        stride_t stride[4]= {info->sequence->width,info->sequence->chroma_width,info->sequence->chroma_width,0};
                        uint64_t csp;
                        // Derive the planar colourspace from how the chroma
                        // dimensions relate to the luma dimensions.
                        switch ((info->sequence->chroma_width==info->sequence->width)+(info->sequence->chroma_height==info->sequence->height)) {
                            case 1: csp=FF_CSP_422P; break;
                            case 2: csp=FF_CSP_444P; break;
                            default:
                            case 0: csp=FF_CSP_420P; break;
                        }
                        Trect r(0,0,info->sequence->picture_width,info->sequence->picture_height);
                        r.sar = Rational(info->sequence->pixel_width,info->sequence->pixel_height);
                        // Correct impossible sar for DVD
                        if (info->sequence->flags & SEQ_FLAG_MPEG2) {
                            r.sar = guessMPEG2sar(r, Rational(info->sequence->pixel_width2,info->sequence->pixel_height2), containerSar);
                        }
                        TffPict pict(csp,data,stride,r,true,frametype,fieldtype,srcLen,NULL); //TODO: src frame size
                        pict.film = m_fFilm;
                        if(frametype == FRAME_TYPE::I) {
                            pict.rtStart = info->display_picture->rtStart;
                        } else {
                            pict.rtStart = oldpict.rtStop;
                        }
                        // cope with a change in rate
                        if (rateInfo->rate.Rate != rateInfo->ratechange.Rate && rateInfo->flushed && frametype == FRAME_TYPE::I) {
                            // Buggy DVD navigator does not work as it is documented.
                            // DPRINTF(_l("rateInfo->ratechange.StartTime = %s rateInfo->rate.StartTime = %s rateInfo->rate.Rate %d"), Trt2str(rateInfo->ratechange.StartTime).c_str(),Trt2str(rateInfo->rate.StartTime).c_str(),rateInfo->rate.Rate);
                            rateInfo->rate.StartTime = pict.rtStart;
                            rateInfo->rate.Rate = rateInfo->ratechange.Rate;
                            rateInfo->isDiscontinuity = true;
                            // DPRINTF(_l("Got Rate StartTime = %s Rate = %d\n"), Trt2str(rateInfo->rate.StartTime).c_str(), rateInfo->rate.Rate);
                        }
                        if ((rateInfo->isDiscontinuity || rateInfo->correctTS) && frametype == FRAME_TYPE::I) {
                            // if we're at a Discontinuity use the times we're being sent in
                            pict.rtStart = rateInfo->rate.StartTime + (pict.rtStart - rateInfo->rate.StartTime) * abs(rateInfo->rate.Rate) / 10000;
                            pict.discontinuity = rateInfo->isDiscontinuity;
                            rateInfo->isDiscontinuity = false;
                        } else {
                            pict.rtStart = oldpict.rtStop;
                        }
                        unsigned int field_count = 2;
                        if (info->display_picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD) {
                            field_count++;
                        }
                        if (rateInfo->rate.Rate < (10000 / TffdshowVideoInputPin::MAX_SPEED)) {
                            pict.rtStop = pict.rtStart + avgTimePerFrame;
                        } else
                            pict.rtStop = pict.rtStart + (avgTimePerFrame * field_count * abs(rateInfo->rate.Rate) / (2 * 10000));
                        if (rateInfo->rate.Rate < (10000 / TffdshowVideoInputPin::MAX_SPEED)) {
                            pict.fieldtype |= FIELD_TYPE::SEQ_START | FIELD_TYPE::SEQ_END;
                        }
                        oldpict=pict;
                        if (rateInfo->isDiscontinuity) {
                            telecineManager.onSeek();
                        }
                        // soft telecine detection
                        // if "Detect soft telecine and average frame durations" is enabled,
                        // frames are flagged as progressive, frame durations are averaged.
                        // pict.film is valid even if the setting is disabled.
                        telecineManager.new_frame(
                            !!(info->display_picture->flags & PIC_FLAG_TOP_FIELD_FIRST),
                            !!(info->display_picture->flags & PIC_FLAG_REPEAT_FIRST_FIELD),
                            pict.rtStart, pict.rtStop);
                    }
                    if (!wait4Iframe) {
                        TffPict pict(oldpict);
                        telecineManager.get_fieldtype(pict);
                        telecineManager.get_timestamps(pict);
                        HRESULT hr = sinkD->deliverDecodedSample(pict);
                        if (hr != S_OK) {
                            return hr;
                        }
                    }
                    // else DPRINTF(_l("libmpeg2: waiting for keyframe"));
                }
                break;
        }
    }
    return S_OK;
}
/* GstVideoDecoder::handle_frame implementation: map the input buffer, feed
 * it to libmpeg2 in one shot, then drive the parser until it asks for more
 * data (STATE_BUFFER) or an error aborts the loop. */
static GstFlowReturn
gst_mpeg2dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
{
  GstMpeg2dec *mpeg2dec = GST_MPEG2DEC (decoder);
  GstBuffer *buf = frame->input_buffer;
  GstMapInfo minfo;
  const mpeg2_info_t *info;
  mpeg2_state_t state;
  gboolean done = FALSE;
  GstFlowReturn ret = GST_FLOW_OK;

  GST_LOG_OBJECT (mpeg2dec, "received frame %d, timestamp %"
      GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
      frame->system_frame_number,
      GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->duration));

  /* Keep our own ref while libmpeg2 reads from the mapped memory. */
  gst_buffer_ref (buf);
  if (!gst_buffer_map (buf, &minfo, GST_MAP_READ)) {
    GST_ERROR_OBJECT (mpeg2dec, "Failed to map input buffer");
    return GST_FLOW_ERROR;
  }

  info = mpeg2dec->info;

  GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer");
  mpeg2_buffer (mpeg2dec->decoder, minfo.data, minfo.data + minfo.size);
  GST_LOG_OBJECT (mpeg2dec, "calling mpeg2_buffer done");

  while (!done) {
    GST_LOG_OBJECT (mpeg2dec, "calling parse");
    state = mpeg2_parse (mpeg2dec->decoder);
    GST_DEBUG_OBJECT (mpeg2dec, "parse state %d", state);

    switch (state) {
#if MPEG2_RELEASE >= MPEG2_VERSION (0, 5, 0)
      case STATE_SEQUENCE_MODIFIED:
        GST_DEBUG_OBJECT (mpeg2dec, "sequence modified");
        mpeg2dec->discont_state = MPEG2DEC_DISC_NEW_PICTURE;
        gst_mpeg2dec_clear_buffers (mpeg2dec);
        /* fall through */
#endif
      case STATE_SEQUENCE:
        ret = handle_sequence (mpeg2dec, info);
        /* if there is an error handling the sequence
         * reset the decoder, maybe something more elegant
         * could be done. */
        if (ret == GST_FLOW_ERROR) {
          GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
              ("decoding error"), ("Bad sequence header"), ret);
          gst_video_decoder_drop_frame (decoder, frame);
          gst_mpeg2dec_flush (decoder);
          goto done;
        }
        break;
      case STATE_SEQUENCE_REPEATED:
        GST_DEBUG_OBJECT (mpeg2dec, "sequence repeated");
        break;
      case STATE_GOP:
        GST_DEBUG_OBJECT (mpeg2dec, "gop");
        break;
      case STATE_PICTURE:
        ret = handle_picture (mpeg2dec, info, frame);
        break;
      case STATE_SLICE_1ST:
        GST_LOG_OBJECT (mpeg2dec, "1st slice of frame encountered");
        break;
      case STATE_PICTURE_2ND:
        GST_LOG_OBJECT (mpeg2dec,
            "Second picture header encountered. Decoding 2nd field");
        break;
#if MPEG2_RELEASE >= MPEG2_VERSION (0, 4, 0)
      case STATE_INVALID_END:
        GST_DEBUG_OBJECT (mpeg2dec, "invalid end");
        /* fall through */
#endif
      case STATE_END:
        GST_DEBUG_OBJECT (mpeg2dec, "end");
        /* fall through */
      case STATE_SLICE:
        GST_DEBUG_OBJECT (mpeg2dec, "display_fbuf:%p, discard_fbuf:%p",
            info->display_fbuf, info->discard_fbuf);
        if (info->display_fbuf && info->display_fbuf->id) {
          ret = handle_slice (mpeg2dec, info);
        } else {
          GST_DEBUG_OBJECT (mpeg2dec, "no picture to display");
        }
        if (info->discard_fbuf && info->discard_fbuf->id)
          gst_mpeg2dec_discard_buffer (mpeg2dec,
              GPOINTER_TO_INT (info->discard_fbuf->id) - 1);
        if (state != STATE_SLICE) {
          gst_mpeg2dec_clear_buffers (mpeg2dec);
        }
        break;
      case STATE_BUFFER:
        /* libmpeg2 consumed all of this input buffer: leave the loop. */
        done = TRUE;
        break;
        /* error */
      case STATE_INVALID:
        GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
            ("decoding error"), ("Reached libmpeg2 invalid state"), ret);
        continue;
      default:
        GST_ERROR_OBJECT (mpeg2dec, "Unknown libmpeg2 state %d, FIXME", state);
        ret = GST_FLOW_OK;
        gst_video_codec_frame_unref (frame);
        goto done;
    }

    if (ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (mpeg2dec, "exit loop, reason %s",
          gst_flow_get_name (ret));
      break;
    }
  }
  gst_video_codec_frame_unref (frame);

done:
  /* Balance the map/ref taken before the decode loop. */
  gst_buffer_unmap (buf, &minfo);
  gst_buffer_unref (buf);
  return ret;
}
int PrivateDecoderMPEG2::GetFrame(AVStream *stream, AVFrame *picture, int *got_picture_ptr, AVPacket *pkt) { AVCodecContext *avctx = stream->codec; *got_picture_ptr = 0; const mpeg2_info_t *info = mpeg2_info(mpeg2dec); mpeg2_buffer(mpeg2dec, pkt->data, pkt->data + pkt->size); while (1) { switch (mpeg2_parse(mpeg2dec)) { case STATE_SEQUENCE: // libmpeg2 needs three buffers to do its work. // We set up two prediction buffers here, from // the set of available video frames. mpeg2_custom_fbuf(mpeg2dec, 1); for (int i = 0; i < 2; i++) { avctx->get_buffer(avctx, picture); mpeg2_set_buf(mpeg2dec, picture->data, picture->opaque); } break; case STATE_PICTURE: // This sets up the third buffer for libmpeg2. // We use up one of the three buffers for each // frame shown. The frames get released once // they are drawn (outside this routine). avctx->get_buffer(avctx, picture); mpeg2_set_buf(mpeg2dec, picture->data, picture->opaque); break; case STATE_BUFFER: // We're finished with the buffer... if (partialFrames.size()) { AVFrame *frm = partialFrames.dequeue(); *got_picture_ptr = 1; *picture = *frm; delete frm; #if 0 QString msg(""); AvFormatDecoder *nd = (AvFormatDecoder *)(avctx->opaque); if (nd && nd->GetNVP() && nd->GetNVP()->getVideoOutput()) msg = nd->GetNVP()->getVideoOutput()->GetFrameStatus(); VERBOSE(VB_IMPORTANT, "ret frame: "<<picture->opaque <<" "<<msg); #endif } return pkt->size; case STATE_INVALID: // This is the error state. The decoder must be // reset on an error. 
Reset(); return -1; case STATE_SLICE: case STATE_END: case STATE_INVALID_END: if (info->display_fbuf) { bool exists = false; avframe_q::iterator it = partialFrames.begin(); for (; it != partialFrames.end(); ++it) if ((*it)->opaque == info->display_fbuf->id) exists = true; if (!exists) { AVFrame *frm = new AVFrame(); frm->data[0] = info->display_fbuf->buf[0]; frm->data[1] = info->display_fbuf->buf[1]; frm->data[2] = info->display_fbuf->buf[2]; frm->data[3] = NULL; frm->opaque = info->display_fbuf->id; frm->type = FF_BUFFER_TYPE_USER; frm->top_field_first = !!(info->display_picture->flags & PIC_FLAG_TOP_FIELD_FIRST); frm->interlaced_frame = !(info->display_picture->flags & PIC_FLAG_PROGRESSIVE_FRAME); frm->repeat_pict = !!(info->display_picture->flags & #if CONFIG_LIBMPEG2EXTERNAL PIC_FLAG_REPEAT_FIRST_FIELD); #else PIC_FLAG_REPEAT_FIELD); #endif partialFrames.enqueue(frm); } } if (info->discard_fbuf) { bool exists = false; avframe_q::iterator it = partialFrames.begin(); for (; it != partialFrames.end(); ++it) { if ((*it)->opaque == info->discard_fbuf->id) { exists = true; (*it)->data[3] = (unsigned char*) 1; } } if (!exists) { AVFrame frame; frame.opaque = info->discard_fbuf->id; frame.type = FF_BUFFER_TYPE_USER; avctx->release_buffer(avctx, &frame); } } break; default: break; }
/*
 * libmpeg2 decode loop that supplies the decoder's three internal frame
 * buffers itself (via mpeg2_set_buf at STATE_SEQUENCE) instead of letting
 * libmpeg2 allocate them.  Displayed frames are written out as PGM.
 */
static void sample3 (FILE * mpgfile)
{
#define BUFFER_SIZE 4096
    uint8_t buffer[BUFFER_SIZE];
    mpeg2dec_t * decoder;
    const mpeg2_info_t * info;
    const mpeg2_sequence_t * sequence;
    mpeg2_state_t state;
    size_t size;
    int framenum = 0;
    /* three buffer sets x three planes (Y, U, V) */
    uint8_t * fbuf[3][3];
    int i, j;

    decoder = mpeg2_init ();
    if (decoder == NULL) {
        fprintf (stderr, "Could not allocate a decoder object.\n");
        exit (1);
    }
    info = mpeg2_info (decoder);

    size = (size_t)-1;
    do {
        state = mpeg2_parse (decoder);
        sequence = info->sequence;
        switch (state) {
        case STATE_BUFFER:
            /* refill the decoder's input */
            size = fread (buffer, 1, BUFFER_SIZE, mpgfile);
            mpeg2_buffer (decoder, buffer, buffer + size);
            break;
        case STATE_SEQUENCE:
            /* Sequence dimensions are now known: allocate and register the
             * three YUV buffer sets the decoder will rotate through. */
            for (i = 0; i < 3; i++) {
                fbuf[i][0] = (uint8_t *) malloc (sequence->width * sequence->height);
                fbuf[i][1] = (uint8_t *) malloc (sequence->chroma_width * sequence->chroma_height);
                fbuf[i][2] = (uint8_t *) malloc (sequence->chroma_width * sequence->chroma_height);
                if (!fbuf[i][0] || !fbuf[i][1] || !fbuf[i][2]) {
                    fprintf (stderr, "Could not allocate an output buffer.\n");
                    exit (1);
                }
                mpeg2_set_buf (decoder, fbuf[i], NULL);
            }
            break;
        case STATE_SLICE:
        case STATE_END:
        case STATE_INVALID_END:
            if (info->display_fbuf)
                save_pgm (sequence->width, sequence->height,
                          sequence->chroma_width, sequence->chroma_height,
                          info->display_fbuf->buf, framenum++);
            /* on end-of-sequence states, release all plane allocations */
            if (state != STATE_SLICE)
                for (i = 0; i < 3; i++)
                    for (j = 0; j < 3; j++)
                        free (fbuf[i][j]);
            break;
        default:
            break;
        }
    } while (size);

    mpeg2_close (decoder);
}
/*
 * (Re)initialize the global libmpeg2 decoder for 'mpgfile' and parse up to
 * the sequence header.
 *
 * On STATE_SEQUENCE: reports width/height through *w/*h (either may be
 * NULL), switches to RGB24 output with custom frame buffers, allocates the
 * three global RGB buffers and registers two of them, then stops.
 *
 * Returns 1 on success; 0 if a picture/slice state arrives before the
 * sequence header (unexpected during initialization).
 */
static int _initmpeg2(FILE *mpgfile,int *w, int *h)
{
    static uint8_t buffer[BUFFER_SIZE];
    mpeg2_state_t state;
    struct fbuf_s * current_fbuf;
    size_t size;
    int pixels;
    int i;

    /* remember the stream for later _getmpeg2frame() calls */
    global_mpegfile=mpgfile;

    /* tear down any decoder left over from a previous run */
    if (decoder!=NULL) {
        mpeg2_close (decoder);
        decoder=NULL;
    }
    decoder = mpeg2_init ();
    if (decoder == NULL) {
        fprintf (stderr, "Could not allocate a decoder object.\n");
        exit (1);
    }
    info = mpeg2_info (decoder);

    size = (size_t)-1;
    do {
        state = mpeg2_parse (decoder);
        if (state==STATE_BUFFER) {
            /* decoder is starved: feed the next chunk */
            size = fread (buffer, 1, BUFFER_SIZE, global_mpegfile);
            mpeg2_buffer (decoder, buffer, buffer + size);
        } else if (state==STATE_SEQUENCE) {
            mpeg2_convert (decoder, mpeg2convert_rgb24, NULL);
            mpeg2_custom_fbuf (decoder, 1);
            if (w!=NULL) *w=info->sequence->width;
            if (h!=NULL) *h=info->sequence->height;
            pixels = info->sequence->width * info->sequence->height;
            /* one packed RGB plane per buffer; chroma planes unused */
            for (i = 0; i < 3; i++) {
                fbuf[i].rgb[0] = (uint8_t *) malloc (3 * pixels);
                fbuf[i].rgb[1] = fbuf[i].rgb[2] = NULL;
                if (!fbuf[i].rgb[0]) {
                    fprintf (stderr, "Could not allocate an output buffer.\n");
                    exit (1);
                }
                fbuf[i].used = 0;
            }
            /* register the two initial reference buffers */
            for (i = 0; i < 2; i++) {
                current_fbuf = get_fbuf ();
                mpeg2_set_buf (decoder, current_fbuf->rgb, current_fbuf);
            }
            break;
        } else if (state==STATE_PICTURE || state==STATE_SLICE || state==STATE_END || state==STATE_INVALID_END) {
            /* picture data before any sequence header: give up */
            fprintf(stderr,"GOT unexpected state during initialization.\n");
            return 0;
        }
    } while (size);

    return 1;
}
int _getmpeg2frame(unsigned char *rgb,int gray) { int i; static uint8_t buffer[BUFFER_SIZE]; mpeg2_state_t state; size_t size; int pixels; struct fbuf_s * current_fbuf; int done=0; size = (size_t)-1; do { state = mpeg2_parse (decoder); if (state==STATE_BUFFER) { //fprintf(stderr,"Got STATE_BUFFER\n"); size = fread (buffer, 1, BUFFER_SIZE, global_mpegfile); mpeg2_buffer (decoder, buffer, buffer + size); } else if (state==STATE_SEQUENCE) { //fprintf(stderr,"Got STATE_SEQUENCE\n"); mpeg2_convert (decoder, mpeg2convert_rgb24, NULL); mpeg2_custom_fbuf (decoder, 1); pixels = info->sequence->width * info->sequence->height; for (i = 0; i < 3; i++) { fbuf[i].rgb[0] = (uint8_t *) malloc (3 * pixels); fbuf[i].rgb[1] = fbuf[i].rgb[2] = NULL; if (!fbuf[i].rgb[0]) { fprintf (stderr, "Could not allocate an output buffer.\n"); exit (1); } fbuf[i].used = 0; } for (i = 0; i < 2; i++) { current_fbuf = get_fbuf (); mpeg2_set_buf (decoder, current_fbuf->rgb, current_fbuf); } } else if (state==STATE_PICTURE) { //fprintf(stderr,"Got STATE_PICTURE\n"); current_fbuf = get_fbuf (); mpeg2_set_buf (decoder, current_fbuf->rgb, current_fbuf); } else if (state==STATE_SLICE || state==STATE_END || state==STATE_INVALID_END) { //if (state==STATE_SLICE) fprintf(stderr,"Got STATE_SLICE\n"); //if (state==STATE_END) fprintf(stderr,"Got STATE_END\n"); //if (state==STATE_INVALID_END) fprintf(stderr,"Got STATE_INVALID_END\n"); if (info->display_fbuf) { int jj; int pixels; uint8_t *base; base=info->display_fbuf->buf[0]; /* we have a complete image ready */ if (gray) { pixels=info->sequence->width*info->sequence->height; for (jj=0;jj<pixels;jj++) { rgb[jj]=(base[0]+base[1]+base[3])/3; base+=3; } } else { pixels=3*info->sequence->width*info->sequence->height; /* we have a complete image ready */ memcpy(rgb,base,pixels*sizeof(unsigned char)); } done=1; } if (info->discard_fbuf) ((struct fbuf_s *)info->discard_fbuf->id)->used = 0; if (state != STATE_SLICE) for (i = 0; i < 3; i++) free (fbuf[i].rgb[0]); if 
(state==STATE_END) { //fprintf(stderr,"Got STATE_END\n"); rewindmpeg2(); } if (state==STATE_INVALID_END) { //fprintf(stderr,"Got STATE_INVALID_END\n"); rewindmpeg2(); } /* we got a single frame */ if (done) { if (size!=0) return 1; return 0; } } } while (size); return 0; }
/*
 * Advance the decoder until one frame boundary (slice/end/invalid state) is
 * reached.  Input comes either from mpg->f (stream_from_disk) or from the
 * in-memory mpg->data blob tracked by mpg->whereami.
 *
 * Side effects: bumps mpg->framenum, and sets mpg->end to a non-zero code
 * (1 = clean end, 2 = invalid end, 3 = invalid state) when the stream is
 * finished or corrupt.  Note: 'dst' is currently unused here.
 */
void getframe(Mpeg2 *mpg, Texture *dst)
{
    int frame_ready=0;
    size_t blocksize=MPEG_BUFFER_SIZE;
    /* static: survives calls, keeps the read buffer off the stack */
    static unsigned char bufz[MPEG_BUFFER_SIZE+1];
    int invalid=0;

    do {
        mpg->state = mpeg2_parse(mpg->decoder);
        switch (mpg->state) {
        case STATE_BUFFER:
            /* decoder is starved: refill from disk or from memory */
            blocksize = MPEG_BUFFER_SIZE;
            //if (mpg->whereami+blocksize > mpg->datasize-1) blocksize = mpg->datasize-mpg->whereami-1;
            //if (blocksize>0)
            if (mpg->stream_from_disk) {
                blocksize=fread(bufz, 1, MPEG_BUFFER_SIZE, mpg->f);
                mpeg2_buffer(mpg->decoder, bufz, bufz+blocksize);
            } else {
                mpeg2_buffer(mpg->decoder, mpg->data+mpg->whereami, mpg->data+mpg->whereami+blocksize);
                mpg->whereami += blocksize;
            }
            break;
        case STATE_SEQUENCE:
            /* request BGR32 output once the sequence header is known */
            mpeg2_convert(mpg->decoder, mpeg2convert_bgr32, NULL);
            break;
        case STATE_INVALID_END:
        {
            invalid=2;
            frame_ready++;
            break;
        }
        case STATE_SLICE:
        case STATE_END:
            frame_ready++;
            break;
        case STATE_INVALID:
        {
            invalid=3;
            frame_ready++;
            break;
        }
        case STATE_SEQUENCE_REPEATED:
        case STATE_GOP:
        case STATE_PICTURE:
        case STATE_SLICE_1ST:
        case STATE_PICTURE_2ND:
            break;
        default:
            break;
        }
    } while (!frame_ready);

    mpg->framenum++;
    if (invalid) mpg->end=invalid;
    // if (mpg->state==STATE_END || mpg->state==STATE_INVALID_END) mpg->end=1; //mpg->framenum=0, mpg->whereami=0;
    if (mpg->state==STATE_END) mpg->end=1;
}
/***************************************************************************** * RunDecoder: the libmpeg2 decoder *****************************************************************************/ static picture_t *DecodeBlock( decoder_t *p_dec, block_t **pp_block ) { decoder_sys_t *p_sys = p_dec->p_sys; mpeg2_state_t state; picture_t *p_pic; block_t *p_block; if( !pp_block || !*pp_block ) return NULL; p_block = *pp_block; if( p_block->i_flags & (BLOCK_FLAG_DISCONTINUITY | BLOCK_FLAG_CORRUPTED) ) { cc_Flush( &p_sys->cc ); mpeg2_reset( p_sys->p_mpeg2dec, p_sys->p_info->sequence != NULL ); DpbClean( p_dec ); } while( 1 ) { state = mpeg2_parse( p_sys->p_mpeg2dec ); switch( state ) { case STATE_SEQUENCE: { /* */ DpbClean( p_dec ); /* */ mpeg2_custom_fbuf( p_sys->p_mpeg2dec, 1 ); /* Set the first 2 reference frames */ GetAR( p_dec ); for( int i = 0; i < 2; i++ ) { picture_t *p_picture = DpbNewPicture( p_dec ); if( !p_picture ) { /* Is it ok ? or do we need a reset ? */ block_Release( p_block ); return NULL; } PutPicture( p_dec, p_picture ); } if( p_sys->p_synchro ) decoder_SynchroRelease( p_sys->p_synchro ); p_sys->p_synchro = decoder_SynchroInit( p_dec, (uint32_t)((uint64_t)1001000000 * 27 / p_sys->p_info->sequence->frame_period) ); p_sys->b_after_sequence_header = true; break; } case STATE_GOP: /* There can be userdata in a GOP. It needs to be remembered for the next picture. 
*/ if( p_sys->p_info->user_data_len > 2 ) { free( p_sys->p_gop_user_data ); p_sys->p_gop_user_data = calloc( p_sys->p_info->user_data_len, sizeof(uint8_t) ); if( p_sys->p_gop_user_data ) { p_sys->i_gop_user_data = p_sys->p_info->user_data_len; memcpy( p_sys->p_gop_user_data, p_sys->p_info->user_data, p_sys->p_info->user_data_len ); } } break; case STATE_PICTURE: { const mpeg2_info_t *p_info = p_sys->p_info; const mpeg2_picture_t *p_current = p_info->current_picture; mtime_t i_pts, i_dts; if( p_sys->b_after_sequence_header && (p_current->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_P ) { /* Intra-slice refresh. Simulate a blank I picture. */ msg_Dbg( p_dec, "intra-slice refresh stream" ); decoder_SynchroNewPicture( p_sys->p_synchro, I_CODING_TYPE, 2, 0, 0, p_info->sequence->flags & SEQ_FLAG_LOW_DELAY ); decoder_SynchroDecode( p_sys->p_synchro ); decoder_SynchroEnd( p_sys->p_synchro, I_CODING_TYPE, 0 ); p_sys->b_slice_i = true; } p_sys->b_after_sequence_header = false; #ifdef PIC_FLAG_PTS i_pts = p_current->flags & PIC_FLAG_PTS ? ( ( p_current->pts == (uint32_t)p_sys->i_current_pts ) ? p_sys->i_current_pts : p_sys->i_previous_pts ) : 0; i_dts = 0; /* Hack to handle demuxers which only have DTS timestamps */ if( !i_pts && !p_block->i_pts && p_block->i_dts > 0 ) { if( p_info->sequence->flags & SEQ_FLAG_LOW_DELAY || (p_current->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_B ) { i_pts = p_block->i_dts; } } p_block->i_pts = p_block->i_dts = 0; /* End hack */ #else /* New interface */ i_pts = p_current->flags & PIC_FLAG_TAGS ? ( ( p_current->tag == (uint32_t)p_sys->i_current_pts ) ? p_sys->i_current_pts : p_sys->i_previous_pts ) : 0; i_dts = p_current->flags & PIC_FLAG_TAGS ? ( ( p_current->tag2 == (uint32_t)p_sys->i_current_dts ) ? 
p_sys->i_current_dts : p_sys->i_previous_dts ) : 0; #endif /* If nb_fields == 1, it is a field picture, and it will be * followed by another field picture for which we won't call * decoder_SynchroNewPicture() because this would have other * problems, so we take it into account here. * This kind of sucks, but I didn't think better. --Meuuh */ decoder_SynchroNewPicture( p_sys->p_synchro, p_current->flags & PIC_MASK_CODING_TYPE, p_current->nb_fields == 1 ? 2 : p_current->nb_fields, i_pts, i_dts, p_info->sequence->flags & SEQ_FLAG_LOW_DELAY ); bool b_skip = false; if( !p_dec->b_pace_control && !p_sys->b_preroll && !(p_sys->b_slice_i && ((p_current->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_P)) && !decoder_SynchroChoose( p_sys->p_synchro, p_current->flags & PIC_MASK_CODING_TYPE, /*p_sys->p_vout->render_time*/ 0 /*FIXME*/, p_info->sequence->flags & SEQ_FLAG_LOW_DELAY ) ) { b_skip = true; } p_pic = NULL; if( !b_skip ) p_pic = DpbNewPicture( p_dec ); if( b_skip || !p_pic ) { mpeg2_skip( p_sys->p_mpeg2dec, 1 ); p_sys->b_skip = true; decoder_SynchroTrash( p_sys->p_synchro ); PutPicture( p_dec, NULL ); if( !b_skip ) { block_Release( p_block ); return NULL; } } else { mpeg2_skip( p_sys->p_mpeg2dec, 0 ); p_sys->b_skip = false; decoder_SynchroDecode( p_sys->p_synchro ); PutPicture( p_dec, p_pic ); } if( p_info->user_data_len > 2 || p_sys->i_gop_user_data > 2 ) { p_sys->i_cc_pts = i_pts; p_sys->i_cc_dts = i_dts; if( (p_current->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_P ) p_sys->i_cc_flags = BLOCK_FLAG_TYPE_P; else if( (p_current->flags & PIC_MASK_CODING_TYPE) == PIC_FLAG_CODING_TYPE_B ) p_sys->i_cc_flags = BLOCK_FLAG_TYPE_B; else p_sys->i_cc_flags = BLOCK_FLAG_TYPE_I; if( p_sys->i_gop_user_data > 2 ) { /* We now have picture info for any cached user_data out of the gop */ cc_Extract( &p_sys->cc, &p_sys->p_gop_user_data[0], p_sys->i_gop_user_data ); p_sys->i_gop_user_data = 0; } /* Extract the CC from the user_data of the picture */ if( 
p_info->user_data_len > 2 ) cc_Extract( &p_sys->cc, &p_info->user_data[0], p_info->user_data_len ); } } break; case STATE_BUFFER: if( !p_block->i_buffer ) { block_Release( p_block ); return NULL; } if( (p_block->i_flags & (BLOCK_FLAG_DISCONTINUITY | BLOCK_FLAG_CORRUPTED)) && p_sys->p_synchro && p_sys->p_info->sequence && p_sys->p_info->sequence->width != (unsigned)-1 ) { decoder_SynchroReset( p_sys->p_synchro ); if( p_sys->p_info->current_fbuf != NULL && p_sys->p_info->current_fbuf->id != NULL ) { p_sys->b_garbage_pic = true; } if( p_sys->b_slice_i ) { decoder_SynchroNewPicture( p_sys->p_synchro, I_CODING_TYPE, 2, 0, 0, p_sys->p_info->sequence->flags & SEQ_FLAG_LOW_DELAY ); decoder_SynchroDecode( p_sys->p_synchro ); decoder_SynchroEnd( p_sys->p_synchro, I_CODING_TYPE, 0 ); } } if( p_block->i_flags & BLOCK_FLAG_PREROLL ) { p_sys->b_preroll = true; } else if( p_sys->b_preroll ) { p_sys->b_preroll = false; if( p_sys->p_synchro ) decoder_SynchroReset( p_sys->p_synchro ); } #ifdef PIC_FLAG_PTS if( p_block->i_pts ) { mpeg2_pts( p_sys->p_mpeg2dec, (uint32_t)p_block->i_pts ); #else /* New interface */ if( p_block->i_pts || p_block->i_dts ) { mpeg2_tag_picture( p_sys->p_mpeg2dec, (uint32_t)p_block->i_pts, (uint32_t)p_block->i_dts ); #endif p_sys->i_previous_pts = p_sys->i_current_pts; p_sys->i_current_pts = p_block->i_pts; p_sys->i_previous_dts = p_sys->i_current_dts; p_sys->i_current_dts = p_block->i_dts; } mpeg2_buffer( p_sys->p_mpeg2dec, p_block->p_buffer, p_block->p_buffer + p_block->i_buffer ); p_block->i_buffer = 0; break; #if MPEG2_RELEASE >= MPEG2_VERSION (0, 5, 0) case STATE_SEQUENCE_MODIFIED: GetAR( p_dec ); break; #endif case STATE_PICTURE_2ND: p_sys->b_second_field = true; break; case STATE_INVALID_END: case STATE_END: case STATE_SLICE: p_pic = NULL; if( p_sys->p_info->display_fbuf && p_sys->p_info->display_fbuf->id ) { p_pic = p_sys->p_info->display_fbuf->id; DpbDisplayPicture( p_dec, p_pic ); decoder_SynchroEnd( p_sys->p_synchro, 
/* NOTE(review): this span begins inside the decoder_SynchroEnd() call opened
 * on the previous line; the enclosing decode function starts above this
 * chunk, so only its tail is documented here. */
                p_sys->p_info->display_picture->flags
                    & PIC_MASK_CODING_TYPE,
                    p_sys->b_garbage_pic );

                /* Stamp the outgoing picture with the synchro-computed date. */
                p_pic->date = decoder_SynchroDate( p_sys->p_synchro );
                if( p_sys->b_garbage_pic )
                    p_pic->date = 0; /* ??? */
                p_sys->b_garbage_pic = false;
            }

            /* libmpeg2 marked this frame buffer discardable: drop our DPB
             * reference to it. */
            if( p_sys->p_info->discard_fbuf &&
                p_sys->p_info->discard_fbuf->id )
            {
                DpbUnlinkPicture( p_dec, p_sys->p_info->discard_fbuf->id );
            }

            /* For still frames */
            if( state == STATE_END && p_pic )
                p_pic->b_force = true;

            if( p_pic )
            {
                /* Avoid frames with identical timestamps.
                 * Especially needed for still frames in DVD menus. */
                if( p_sys->i_last_frame_pts == p_pic->date )
                    p_pic->date++;
                p_sys->i_last_frame_pts = p_pic->date;
                /* Hand the finished picture back to the caller. */
                return p_pic;
            }
            break;

        case STATE_INVALID:
        {
            msg_Err( p_dec, "invalid picture encountered" );
            /* I don't think we have anything to do, but well without
             * docs ... */
            break;
        }

        default:
            break;
        }
    }

    /* Never reached */
    return NULL;
}

/*****************************************************************************
 * CloseDecoder: libmpeg2 decoder destruction
 *****************************************************************************/
static void CloseDecoder( vlc_object_t *p_this )
{
    decoder_t *p_dec = (decoder_t *)p_this;
    decoder_sys_t *p_sys = p_dec->p_sys;

    /* Release every picture still referenced by the decoded-picture buffer. */
    DpbClean( p_dec );

    /* free(NULL) is a no-op, so the cached GOP user data needs no guard. */
    free( p_sys->p_gop_user_data );

    /* Synchro and libmpeg2 handles may never have been created if Open
     * failed part-way, hence the NULL checks. */
    if( p_sys->p_synchro )
        decoder_SynchroRelease( p_sys->p_synchro );
    if( p_sys->p_mpeg2dec )
        mpeg2_close( p_sys->p_mpeg2dec );

    free( p_sys );
}
/**
 * Decode one chunk of MPEG-1/2 video elementary stream.
 *
 * Feeds @p buffer to the libmpeg2 parser and walks its state machine until
 * the parser requests more data a second time (i.e. the chunk is fully
 * consumed).  The first sequence header configures the video output; each
 * decoded frame is pushed to the renderer via m_vft->video_have_frame().
 *
 * @param ptr        codec instance (an mpeg2dec_codec_t in disguise)
 * @param pts        timestamp for this chunk; msec_timestamp is a PTS or a
 *                   DTS depending on pts->timestamp_is_pts
 * @param from_rtp   unused in this implementation (vft signature)
 * @param sync_frame unused in this implementation (vft signature)
 * @param buffer     start of the ES bytes to decode
 * @param buflen     number of bytes in @p buffer
 * @param ud         unused user data (vft signature)
 * @return           buflen — the whole chunk is always consumed
 */
static int mpeg2dec_decode (codec_data_t *ptr,
			    frame_timestamp_t *pts,
			    int from_rtp,
			    int *sync_frame,
			    uint8_t *buffer,
			    uint32_t buflen,
			    void *ud)
{
  mpeg2dec_codec_t *mpeg2dec = (mpeg2dec_codec_t *)ptr;
  mpeg2dec_t *decoder = mpeg2dec->m_decoder;
  const mpeg2_info_t *info;
  mpeg2_state_t state;
  uint64_t ts = pts->msec_timestamp;

  info = mpeg2_info(decoder);

  bool passed_buffer = false;   /* has `buffer` been handed to libmpeg2 yet? */
  bool finished_buffer = false; /* second STATE_BUFFER => chunk exhausted */

  do {
    state = mpeg2_parse(decoder);
    const mpeg2_sequence_t *sequence = info->sequence;

    switch (state) {
    case STATE_BUFFER:
      if (passed_buffer == false) {
	mpeg2_buffer(decoder, buffer, buffer + buflen);
	passed_buffer = true;
      } else {
	/* Parser asked for data again: nothing left in this chunk. */
	finished_buffer = true;
      }
      break;

    case STATE_SEQUENCE: {
      if (mpeg2dec->m_video_initialized == 0) {
	mpeg2dec->m_h = sequence->height;
	mpeg2dec->m_w = sequence->width;
	/* Zero-initialize everything MP4AV_Mpeg3ParseSeqHdr() may fill in:
	 * the old code read frame_rate uninitialized (UB) whenever the
	 * sequence-header parse failed. */
	int have_mpeg2 = 0;
	uint32_t height = 0;
	uint32_t width = 0;
	double frame_rate = 0.0;
	double bitrate = 0.0;
	double aspect_ratio = 0.0;
	uint8_t profile = 0;
	if (MP4AV_Mpeg3ParseSeqHdr(buffer, buflen, &have_mpeg2,
				   &height, &width, &frame_rate,
				   &bitrate, &aspect_ratio, &profile) < 0) {
	  mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec",
				   "pix w %u pix h %u",
				   sequence->pixel_width,
				   sequence->pixel_height);
	  /* Derive the display aspect ratio from libmpeg2's pixel
	   * geometry instead. */
	  aspect_ratio = sequence->pixel_width;
	  aspect_ratio *= mpeg2dec->m_w;
	  aspect_ratio /= (double)(sequence->pixel_height * mpeg2dec->m_h);
	  /* Recover the frame rate from libmpeg2's frame period
	   * (expressed in 1/27000000 s units) rather than leaving it 0. */
	  if (sequence->frame_period != 0) {
	    frame_rate = 27000000.0 / (double)sequence->frame_period;
	  }
	}
	mpeg2dec->pts_convert.frame_rate = frame_rate;
	mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec",
				 "%ux%u aspect %g",
				 mpeg2dec->m_w, mpeg2dec->m_h, aspect_ratio);
	mpeg2dec->m_vft->video_configure(mpeg2dec->m_ifptr,
					 mpeg2dec->m_w,
					 mpeg2dec->m_h,
					 VIDEO_FORMAT_YUV,
					 aspect_ratio);
	mpeg2dec->m_video_initialized = 1;
      }
      break;
    }

    case STATE_SLICE:
    case STATE_END:
    case STATE_INVALID_END:
      /* INVALID_END means a new sequence header (with a new size) was
       * found while a sequence was in progress. */
#ifdef DEBUG_MPEG2DEC_FRAME
      mpeg2dec->m_vft->log_msg(LOG_DEBUG, "mpeg2dec",
			       "frame "U64" decoded", mpeg2dec->cached_ts);
#endif
      if (info->display_fbuf) {
	mpeg2dec->m_vft->video_have_frame(mpeg2dec->m_ifptr,
					  info->display_fbuf->buf[0],
					  info->display_fbuf->buf[1],
					  info->display_fbuf->buf[2],
					  sequence->width,
					  sequence->chroma_width,
					  mpeg2dec->m_cached_ts_invalid ?
					  ts : mpeg2dec->cached_ts);
      }
      break;

    case STATE_SEQUENCE_REPEATED: /* we don't care about these */
    case STATE_GOP:
    case STATE_PICTURE:
    case STATE_SLICE_1ST:
    case STATE_PICTURE_2ND:
    case STATE_INVALID:
    default:
      break;
    }
  } while (finished_buffer == false);

  /* Convert the incoming timestamp into the display timestamp cached for
   * the next decoded frame. */
  mpeg2dec->m_cached_ts_invalid = false;
  if (pts->timestamp_is_pts) {
    if (info->current_picture == NULL ||
	mpeg3_find_dts_from_pts(&mpeg2dec->pts_convert,
				ts,
				info->current_picture->flags & PIC_MASK_CODING_TYPE,
				info->current_picture->temporal_reference,
				&mpeg2dec->cached_ts) < 0) {
      mpeg2dec->m_cached_ts_invalid = true;
    }
  } else {
    mpeg2dec->cached_ts = ts;
  }
  return (buflen);
}