void CMPEG2HeaderParser::ParseMPEG2Header(const BYTE *pData, size_t length) { if (length < 16) return; GetBitContext gb; const uint8_t *start = pData; const uint8_t *end = start + length; const uint8_t *next = nullptr; int size; start = find_next_marker(start, end); next = start; for(; next < end; start = next) { next = find_next_marker(start + 4, end); size = (int)(next - start - 4); if(size <= 0) continue; init_get_bits(&gb, start + 4, (size - 4) * 8); switch(AV_RB32(start)) { case SEQ_START_CODE: MPEG2ParseSequenceHeader(&gb); break; case EXT_START_CODE: MPEG2ParseExtHeader(&gb); break; } } }
static void vc1_extract_headers(AVCodecParserContext *s, AVCodecContext *avctx, const uint8_t *buf, int buf_size) { VC1ParseContext *vpc = (VC1ParseContext *)s->priv_data; GetBitContext gb; const uint8_t *start, *end, *next; uint8_t *buf2 = (uint8_t *)av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE); vpc->v.s.avctx = avctx; vpc->v.parse_only = 1; next = buf; s->repeat_pict = 0; for(start = buf, end = buf + buf_size; next < end; start = next){ int buf2_size, size; next = find_next_marker(start + 4, end); size = next - start - 4; buf2_size = vc1_unescape_buffer(start + 4, size, buf2); init_get_bits(&gb, buf2, buf2_size * 8); if(size <= 0) continue; switch(AV_RB32(start)){ case VC1_CODE_SEQHDR: vc1_decode_sequence_header(avctx, &vpc->v, &gb); break; case VC1_CODE_ENTRYPOINT: vc1_decode_entry_point(avctx, &vpc->v, &gb); break; case VC1_CODE_FRAME: if(vpc->v.profile < PROFILE_ADVANCED) vc1_parse_frame_header (&vpc->v, &gb); else vc1_parse_frame_header_adv(&vpc->v, &gb); /* keep AV_PICTURE_TYPE_BI internal to VC1 */ if (vpc->v.s.pict_type == AV_PICTURE_TYPE_BI) s->pict_type = AV_PICTURE_TYPE_B; else s->pict_type = vpc->v.s.pict_type; if (avctx->ticks_per_frame > 1){ // process pulldown flags s->repeat_pict = 1; // Pulldown flags are only valid when 'broadcast' has been set. // So ticks_per_frame will be 2 if (vpc->v.rff){ // repeat field s->repeat_pict = 2; }else if (vpc->v.rptfrm){ // repeat frames s->repeat_pict = vpc->v.rptfrm * 2 + 1; } } break; } } av_free(buf2); }
void CVC1HeaderParser::ParseVC1Header(const BYTE *pData, size_t length, AVCodecID codec) { GetBitContext gb; if (codec == AV_CODEC_ID_VC1) { if (length < 16) return; const uint8_t *start = pData; const uint8_t *end = start + length; const uint8_t *next = nullptr; int size, buf2_size; uint8_t *buf2; buf2 = (uint8_t *)av_mallocz(length + FF_INPUT_BUFFER_PADDING_SIZE); start = find_next_marker(start, end); next = start; for(; next < end; start = next) { next = find_next_marker(start + 4, end); size = (int)(next - start - 4); if(size <= 0) continue; buf2_size = vc1_unescape_buffer(start + 4, size, buf2); init_get_bits(&gb, buf2, buf2_size * 8); switch(AV_RB32(start)) { case VC1_CODE_SEQHDR: VC1ParseSequenceHeader(&gb); break; } } av_freep(&buf2); } else if (codec == AV_CODEC_ID_WMV3) { if (length < 4) return; init_get_bits(&gb, pData, length * 8); VC1ParseSequenceHeader(&gb); } }
/**
 * Walk a VC-1 advanced-profile buffer BDU by BDU, unescape each payload
 * and feed the sequence header / entry point / frame header to the VC-1
 * context so the parser can report the picture type.
 *
 * @param s        parser context; priv_data holds the VC1ParseContext,
 *                 pict_type is filled in here
 * @param avctx    codec context receiving sequence/entry-point parameters
 * @param buf      escaped VC-1 elementary stream data
 * @param buf_size number of bytes in buf
 */
static void vc1_extract_headers(AVCodecParserContext *s, AVCodecContext *avctx,
                                const uint8_t *buf, int buf_size)
{
    VC1ParseContext *vpc = s->priv_data;
    GetBitContext gb;
    const uint8_t *start, *end, *next;
    uint8_t *buf2 = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE);

    /* Robustness: the original dereferenced buf2 without a null check. */
    if (!buf2)
        return;

    vpc->v.s.avctx = avctx;
    vpc->v.parse_only = 1;
    next = buf;

    for (start = buf, end = buf + buf_size; next < end; start = next) {
        int buf2_size, size;

        next = find_next_marker(start + 4, end);
        size = next - start - 4;
        /* Fix: validate the payload size BEFORE unescaping. The original
         * called vc1_unescape_buffer()/init_get_bits() first, so a
         * non-positive size caused an out-of-bounds read. */
        if (size <= 0)
            continue;
        buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
        init_get_bits(&gb, buf2, buf2_size * 8);

        switch (AV_RB32(start)) {
        case VC1_CODE_SEQHDR:
            vc1_decode_sequence_header(avctx, &vpc->v, &gb);
            break;
        case VC1_CODE_ENTRYPOINT:
            vc1_decode_entry_point(avctx, &vpc->v, &gb);
            break;
        case VC1_CODE_FRAME:
            if (vpc->v.profile < PROFILE_ADVANCED)
                vc1_parse_frame_header    (&vpc->v, &gb);
            else
                vc1_parse_frame_header_adv(&vpc->v, &gb);

            /* keep FF_BI_TYPE internal to VC1 */
            if (vpc->v.s.pict_type == FF_BI_TYPE)
                s->pict_type = FF_B_TYPE;
            else
                s->pict_type = vpc->v.s.pict_type;
            break;
        }
    }

    av_free(buf2);
}
/**
 * Determine the picture type of a VC-1 frame without fully decoding it.
 *
 * If the buffer starts with a BDU marker, the frame payload is located by
 * scanning for VC1_CODE_FRAME; otherwise the buffer itself is treated as
 * the frame data. The picture-type bits are then read according to the
 * profile stored in the previously parsed sequence header.
 *
 * @param buf       frame data (escaped advanced-profile BDUs or raw
 *                  simple/main-profile frame data)
 * @param buflen    number of bytes in buf
 * @param vc1Header previously parsed sequence-header state (profile,
 *                  interlaced/finterp/rangered/bframes flags)
 * @return the detected AVPictureType, or AV_PICTURE_TYPE_NONE if no frame
 *         payload could be located
 */
static AVPictureType parse_picture_type(const uint8_t *buf, int buflen, CVC1HeaderParser *vc1Header)
{
  AVPictureType pictype = AV_PICTURE_TYPE_NONE;

  // Fix: AV_RB32 reads 4 bytes unconditionally — reject short buffers
  // instead of reading past the end.
  if (!buf || buflen < 4)
    return pictype;

  const BYTE *framestart = buf;
  if (IS_MARKER(AV_RB32(buf))) {
    // Escaped advanced-profile stream: locate the frame BDU payload.
    framestart = NULL;
    const BYTE *start, *end, *next;
    next = buf;
    for (start = buf, end = buf + buflen; next < end; start = next) {
      if (AV_RB32(start) == VC1_CODE_FRAME) {
        framestart = start + 4;
        break;
      }
      next = find_next_marker(start + 4, end);
    }
  }

  if (framestart) {
    GetBitContext gb;
    init_get_bits(&gb, framestart, (buflen - (framestart - buf)) * 8);
    if (vc1Header->hdr.profile == PROFILE_ADVANCED) {
      int fcm = PROGRESSIVE;
      if (vc1Header->hdr.interlaced)
        fcm = decode012(&gb);
      if (fcm == ILACE_FIELD) {
        // Field-interlaced: 3-bit field picture type
        int fptype = get_bits(&gb, 3);
        pictype = (fptype & 2) ? AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_I;
        if (fptype & 4) // B-picture
          pictype = (fptype & 2) ? AV_PICTURE_TYPE_BI : AV_PICTURE_TYPE_B;
      } else {
        // Progressive / frame-interlaced: unary picture-type code
        switch (get_unary(&gb, 0, 4)) {
          case 0:
            pictype = AV_PICTURE_TYPE_P;
            break;
          case 1:
            pictype = AV_PICTURE_TYPE_B;
            break;
          case 2:
            pictype = AV_PICTURE_TYPE_I;
            break;
          case 3:
            pictype = AV_PICTURE_TYPE_BI;
            break;
          case 4:
            // skipped picture — reported as P
            pictype = AV_PICTURE_TYPE_P;
            break;
        }
      }
    } else {
      // Simple/main profile frame header
      if (vc1Header->hdr.finterp)
        skip_bits1(&gb);
      skip_bits(&gb, 2); // framecnt
      if (vc1Header->hdr.rangered)
        skip_bits1(&gb);
      int pic = get_bits1(&gb);
      if (vc1Header->hdr.bframes) {
        // With B-frames enabled, a 0 bit needs a second bit to tell I from B.
        if (!pic) {
          if (get_bits1(&gb)) {
            pictype = AV_PICTURE_TYPE_I;
          } else {
            pictype = AV_PICTURE_TYPE_B;
          }
        } else {
          pictype = AV_PICTURE_TYPE_P;
        }
      } else {
        pictype = pic ? AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_I;
      }
    }
  }
  return pictype;
}
int av_vc1_decode_frame(AVCodecContext *avctx, const uint8_t *buf, int buf_size, int *nFrameSize) { int n_slices = 0, i; VC1Context *v = avctx->priv_data; MpegEncContext *s = &v->s; uint8_t *buf2 = NULL; const uint8_t *buf_start = buf, *buf_start_second_field = NULL; struct { uint8_t *buf; GetBitContext gb; int mby_start; } *slices = NULL, *tmp; v->second_field = 0; *nFrameSize = 0; /* for advanced profile we may need to parse and unescape data */ if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) { int buf_size2 = 0; buf2 = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE); if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */ const uint8_t *start, *end, *next; int size; next = buf; for (start = buf, end = buf + buf_size; next < end; start = next) { next = find_next_marker(start + 4, end); size = next - start - 4; if (size <= 0) continue; switch (AV_RB32(start)) { case VC1_CODE_FRAME: buf_start = start; buf_size2 = vc1_unescape_buffer(start + 4, size, buf2); break; case VC1_CODE_FIELD: { int buf_size3; buf_start_second_field = start; slices = av_realloc(slices, sizeof(*slices) * (n_slices + 1)); if (!slices) goto err; slices[n_slices].buf = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE); if (!slices[n_slices].buf) goto err; buf_size3 = vc1_unescape_buffer(start + 4, size, slices[n_slices].buf); init_get_bits(&slices[n_slices].gb, slices[n_slices].buf, buf_size3 << 3); /* assuming that the field marker is at the exact middle, hope it's correct */ slices[n_slices].mby_start = s->mb_height >> 1; n_slices++; break; } case VC1_CODE_ENTRYPOINT: /* it should be before frame data */ buf_size2 = vc1_unescape_buffer(start + 4, size, buf2); init_get_bits(&s->gb, buf2, buf_size2 * 8); ff_vc1_decode_entry_point(avctx, v, &s->gb); break; case VC1_CODE_SLICE: { int buf_size3; slices = av_realloc(slices, sizeof(*slices) * (n_slices + 1)); if (!slices) goto err; slices[n_slices].buf = av_mallocz(buf_size + 
FF_INPUT_BUFFER_PADDING_SIZE); if (!slices[n_slices].buf) goto err; buf_size3 = vc1_unescape_buffer(start + 4, size, slices[n_slices].buf); init_get_bits(&slices[n_slices].gb, slices[n_slices].buf, buf_size3 << 3); slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9); n_slices++; break; } } } } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
goto err; buf_size3 = vc1_unescape_buffer(start + 4, size, slices[n_slices].buf); init_get_bits(&slices[n_slices].gb, slices[n_slices].buf, buf_size3 << 3); slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9); n_slices++; break; } } } } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */ const uint8_t *divider; int buf_size3; divider = find_next_marker(buf, buf + buf_size); if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) { av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n"); goto err; } else { /* found field marker, unescape second field */ buf_start_second_field = divider; tmp = av_realloc(slices, sizeof(*slices) * (n_slices + 1)); if (!tmp) goto err; slices = tmp; slices[n_slices].buf = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE); if (!slices[n_slices].buf) goto err; buf_size3 = vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf); init_get_bits(&slices[n_slices].gb, slices[n_slices].buf, buf_size3 << 3);