Example #1
static int decode_unregistered_user_data(H264Context *h, int size)
{
    uint8_t user_data[16 + 256];
    int e, build, i;

    if (size < 16)
        return AVERROR_INVALIDDATA;

    for (i = 0; i < sizeof(user_data) - 1 && i < size; i++)
        user_data[i] = get_bits(&h->gb, 8);

    user_data[i] = 0;
    e = sscanf(user_data + 16, "x264 - core %d", &build);
    if (e == 1 && build > 0)
        h->x264_build = build;

    if (h->avctx->debug & FF_DEBUG_BUGS)
        av_log(h->avctx, AV_LOG_DEBUG, "user data:\"%s\"\n", user_data + 16);

    for (; i < size; i++)
        skip_bits(&h->gb, 8);

    return 0;
}
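The snippet above recovers the x264 build number by matching the string that follows the 16-byte UUID of the unregistered SEI payload. A minimal standalone sketch of the same sscanf match, using a made-up payload string instead of bytes read from the bitstream:

#include <stdio.h>
#include <string.h>

int main(void)
{
    char user_data[16 + 256] = { 0 };
    int build = 0;

    /* pretend the 16-byte UUID and the payload were already read from the stream */
    strcpy(user_data + 16, "x264 - core 157 r2969 d4099dd");

    if (sscanf(user_data + 16, "x264 - core %d", &build) == 1 && build > 0)
        printf("x264_build = %d\n", build);
    return 0;
}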
Example #2
/**
 * decodes the group of blocks / video packet header.
 * @return <0 if no resync found
 */
static int ff_h261_resync(H261Context *h){
    MpegEncContext * const s = &h->s;
    int left, ret;

    if ( h->gob_start_code_skipped ){
        ret= h261_decode_gob_header(h);
        if(ret>=0)
            return 0;
    }
    else{
        if(show_bits(&s->gb, 15)==0){
            ret= h261_decode_gob_header(h);
            if(ret>=0)
                return 0;
        }
        //OK, it is not where it is supposed to be ...
        s->gb= s->last_resync_gb;
        align_get_bits(&s->gb);
        left= s->gb.size_in_bits - get_bits_count(&s->gb);

        for(;left>15+1+4+5; left-=8){
            if(show_bits(&s->gb, 15)==0){
                GetBitContext bak= s->gb;

                ret= h261_decode_gob_header(h);
                if(ret>=0)
                    return 0;

                s->gb= bak;
            }
            skip_bits(&s->gb, 8);
        }
    }

    return -1;
}
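The resync loop above advances one byte at a time and tests show_bits(&s->gb, 15) == 0 for the H.261 start-code prefix before trying to parse a GOB header. A standalone sketch of that byte-aligned scan over a plain buffer; show15() is a local helper (not libavcodec's API) and the buffer contents are made up:

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

/* top 15 bits of a big-endian 16-bit load at p */
static int show15(const uint8_t *p)
{
    return ((p[0] << 8) | p[1]) >> 1;
}

int main(void)
{
    uint8_t buf[] = { 0xff, 0x3c, 0x00, 0x00, 0x9a, 0x42 };
    size_t i;

    for (i = 0; i + 2 <= sizeof(buf); i++) {     /* advance one byte per iteration */
        if (show15(buf + i) == 0) {              /* candidate start-code prefix */
            printf("possible GOB start at byte %zu\n", i);
            break;
        }
    }
    return 0;
}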
Example #3
static int decode_user_data_itu_t_t35(H264Context *h, int size)
{
    uint32_t user_identifier;
    int dtg_active_format;

    if (size < 7)
        return -1;
    size -= 7;

    skip_bits(&h->gb, 8);   // country_code
    skip_bits(&h->gb, 16);  // provider_code
    user_identifier = get_bits_long(&h->gb, 32);

    switch (user_identifier) {
        case 0x44544731:    // "DTG1" - AFD_data
            if (size < 1)
                return -1;
            skip_bits(&h->gb, 1);
            if (get_bits(&h->gb, 1)) {
                skip_bits(&h->gb, 6);
                if (size < 2)
                    return -1;
                skip_bits(&h->gb, 4);
                dtg_active_format = get_bits(&h->gb, 4);
                h->avctx->dtg_active_format = dtg_active_format;
            } else {
                skip_bits(&h->gb, 6);
            }
            break;
        default:
            skip_bits(&h->gb, size * 8);
            break;
    }

    return 0;
}
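The switch above keys on a 32-bit big-endian identifier assembled from the four ASCII bytes "DTG1". A small sketch of building and matching such a tag; BE_TAG is a local stand-in for the MKBETAG idea, not the libavutil macro:

#include <stdio.h>
#include <stdint.h>

#define BE_TAG(a,b,c,d) (((uint32_t)(a) << 24) | ((uint32_t)(b) << 16) | ((uint32_t)(c) << 8) | (uint32_t)(d))

int main(void)
{
    uint32_t user_identifier = BE_TAG('D','T','G','1');

    if (user_identifier == 0x44544731)          /* same constant as in the switch above */
        printf("AFD payload (DTG1) recognized\n");
    return 0;
}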
Example #4
static av_cold int mpc8_decode_init(AVCodecContext * avctx)
{
    int i;
    MPCContext *c = avctx->priv_data;
    GetBitContext gb;
    static int vlc_initialized = 0;

    static VLC_TYPE band_table[542][2];
    static VLC_TYPE q1_table[520][2];
    static VLC_TYPE q9up_table[524][2];
    static VLC_TYPE scfi0_table[1 << MPC8_SCFI0_BITS][2];
    static VLC_TYPE scfi1_table[1 << MPC8_SCFI1_BITS][2];
    static VLC_TYPE dscf0_table[560][2];
    static VLC_TYPE dscf1_table[598][2];
    static VLC_TYPE q3_0_table[512][2];
    static VLC_TYPE q3_1_table[516][2];
    static VLC_TYPE codes_table[5708][2];

    if(avctx->extradata_size < 2){
        av_log(avctx, AV_LOG_ERROR, "Too small extradata size (%i)!\n", avctx->extradata_size);
        return -1;
    }
    memset(c->oldDSCF, 0, sizeof(c->oldDSCF));
    av_lfg_init(&c->rnd, 0xDEADBEEF);
    dsputil_init(&c->dsp, avctx);

    ff_mpc_init();

    init_get_bits(&gb, avctx->extradata, 16);

    skip_bits(&gb, 3);//sample rate
    c->maxbands = get_bits(&gb, 5) + 1;
    skip_bits(&gb, 4);//channels
    c->MSS = get_bits1(&gb);
    c->frames = 1 << (get_bits(&gb, 3) * 2);

    avctx->sample_fmt = SAMPLE_FMT_S16;
    avctx->channel_layout = (avctx->channels==2) ? CH_LAYOUT_STEREO : CH_LAYOUT_MONO;

    if(vlc_initialized) return 0;
    av_log(avctx, AV_LOG_DEBUG, "Initing VLC\n");

    band_vlc.table = band_table;
    band_vlc.table_allocated = 542;
    init_vlc(&band_vlc, MPC8_BANDS_BITS, MPC8_BANDS_SIZE,
             mpc8_bands_bits,  1, 1,
             mpc8_bands_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);

    q1_vlc.table = q1_table;
    q1_vlc.table_allocated = 520;
    init_vlc(&q1_vlc, MPC8_Q1_BITS, MPC8_Q1_SIZE,
             mpc8_q1_bits,  1, 1,
             mpc8_q1_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);
    q9up_vlc.table = q9up_table;
    q9up_vlc.table_allocated = 524;
    init_vlc(&q9up_vlc, MPC8_Q9UP_BITS, MPC8_Q9UP_SIZE,
             mpc8_q9up_bits,  1, 1,
             mpc8_q9up_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);

    scfi_vlc[0].table = scfi0_table;
    scfi_vlc[0].table_allocated = 1 << MPC8_SCFI0_BITS;
    init_vlc(&scfi_vlc[0], MPC8_SCFI0_BITS, MPC8_SCFI0_SIZE,
             mpc8_scfi0_bits,  1, 1,
             mpc8_scfi0_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);
    scfi_vlc[1].table = scfi1_table;
    scfi_vlc[1].table_allocated = 1 << MPC8_SCFI1_BITS;
    init_vlc(&scfi_vlc[1], MPC8_SCFI1_BITS, MPC8_SCFI1_SIZE,
             mpc8_scfi1_bits,  1, 1,
             mpc8_scfi1_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);

    dscf_vlc[0].table = dscf0_table;
    dscf_vlc[0].table_allocated = 560;
    init_vlc(&dscf_vlc[0], MPC8_DSCF0_BITS, MPC8_DSCF0_SIZE,
             mpc8_dscf0_bits,  1, 1,
             mpc8_dscf0_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);
    dscf_vlc[1].table = dscf1_table;
    dscf_vlc[1].table_allocated = 598;
    init_vlc(&dscf_vlc[1], MPC8_DSCF1_BITS, MPC8_DSCF1_SIZE,
             mpc8_dscf1_bits,  1, 1,
             mpc8_dscf1_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);

    q3_vlc[0].table = q3_0_table;
    q3_vlc[0].table_allocated = 512;
    init_vlc_sparse(&q3_vlc[0], MPC8_Q3_BITS, MPC8_Q3_SIZE,
             mpc8_q3_bits,  1, 1,
             mpc8_q3_codes, 1, 1,
             mpc8_q3_syms,  1, 1, INIT_VLC_USE_NEW_STATIC);
    q3_vlc[1].table = q3_1_table;
    q3_vlc[1].table_allocated = 516;
    init_vlc_sparse(&q3_vlc[1], MPC8_Q4_BITS, MPC8_Q4_SIZE,
             mpc8_q4_bits,  1, 1,
             mpc8_q4_codes, 1, 1,
             mpc8_q4_syms,  1, 1, INIT_VLC_USE_NEW_STATIC);

    for(i = 0; i < 2; i++){
        res_vlc[i].table = &codes_table[vlc_offsets[0+i]];
        res_vlc[i].table_allocated = vlc_offsets[1+i] - vlc_offsets[0+i];
        init_vlc(&res_vlc[i], MPC8_RES_BITS, MPC8_RES_SIZE,
                 &mpc8_res_bits[i],  1, 1,
                 &mpc8_res_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);

        q2_vlc[i].table = &codes_table[vlc_offsets[2+i]];
        q2_vlc[i].table_allocated = vlc_offsets[3+i] - vlc_offsets[2+i];
        init_vlc(&q2_vlc[i], MPC8_Q2_BITS, MPC8_Q2_SIZE,
                 &mpc8_q2_bits[i],  1, 1,
                 &mpc8_q2_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);

        quant_vlc[0][i].table = &codes_table[vlc_offsets[4+i]];
        quant_vlc[0][i].table_allocated = vlc_offsets[5+i] - vlc_offsets[4+i];
        init_vlc(&quant_vlc[0][i], MPC8_Q5_BITS, MPC8_Q5_SIZE,
                 &mpc8_q5_bits[i],  1, 1,
                 &mpc8_q5_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);
        quant_vlc[1][i].table = &codes_table[vlc_offsets[6+i]];
        quant_vlc[1][i].table_allocated = vlc_offsets[7+i] - vlc_offsets[6+i];
        init_vlc(&quant_vlc[1][i], MPC8_Q6_BITS, MPC8_Q6_SIZE,
                 &mpc8_q6_bits[i],  1, 1,
                 &mpc8_q6_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);
        quant_vlc[2][i].table = &codes_table[vlc_offsets[8+i]];
        quant_vlc[2][i].table_allocated = vlc_offsets[9+i] - vlc_offsets[8+i];
        init_vlc(&quant_vlc[2][i], MPC8_Q7_BITS, MPC8_Q7_SIZE,
                 &mpc8_q7_bits[i],  1, 1,
                 &mpc8_q7_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);
        quant_vlc[3][i].table = &codes_table[vlc_offsets[10+i]];
        quant_vlc[3][i].table_allocated = vlc_offsets[11+i] - vlc_offsets[10+i];
        init_vlc(&quant_vlc[3][i], MPC8_Q8_BITS, MPC8_Q8_SIZE,
                 &mpc8_q8_bits[i],  1, 1,
                 &mpc8_q8_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);
    }
    vlc_initialized = 1;
    return 0;
}
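The extradata parse at the top of the function consumes 3+5 bits of the first byte and 4+1+3 bits of the second, most significant bit first. A standalone sketch of the same unpacking with plain shifts and masks; the two byte values are hypothetical:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint8_t extradata[2] = { 0x2b, 0x0a };              /* made-up example bytes */
    int samplerate_code  =  extradata[0] >> 5;          /* top 3 bits: sample rate code */
    int maxbands         = (extradata[0] & 0x1f) + 1;   /* next 5 bits, plus one        */
    int channels_code    =  extradata[1] >> 4;          /* next 4 bits: channel code    */
    int mss              = (extradata[1] >> 3) & 1;     /* mid/side stereo flag         */
    int frames           = 1 << ((extradata[1] & 7) * 2);

    printf("maxbands=%d MSS=%d frames=%d (sr code %d, ch code %d)\n",
           maxbands, mss, frames, samplerate_code, channels_code);
    return 0;
}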
Example #5
/* don't understand why they chose a different header! */
int ff_intel_h263_decode_picture_header(MpegEncContext *s)
{
    int format;

    /* picture header */
    if (get_bits_long(&s->gb, 22) != 0x20) {
        av_log(s->avctx, AV_LOG_ERROR, "Bad picture start code\n");
        return -1;
    }
    s->picture_number = get_bits(&s->gb, 8); /* picture timestamp */

    if (get_bits1(&s->gb) != 1) {
        av_log(s->avctx, AV_LOG_ERROR, "Bad marker\n");
        return -1;      /* marker */
    }
    if (get_bits1(&s->gb) != 0) {
        av_log(s->avctx, AV_LOG_ERROR, "Bad H263 id\n");
        return -1;      /* h263 id */
    }
    skip_bits1(&s->gb);         /* split screen off */
    skip_bits1(&s->gb);         /* camera  off */
    skip_bits1(&s->gb);         /* freeze picture release off */

    format = get_bits(&s->gb, 3);
    if (format == 0 || format == 6) {
        av_log(s->avctx, AV_LOG_ERROR, "Intel H263 free format not supported\n");
        return -1;
    }
    s->h263_plus = 0;

    s->pict_type = AV_PICTURE_TYPE_I + get_bits1(&s->gb);

    s->h263_long_vectors = get_bits1(&s->gb);

    if (get_bits1(&s->gb) != 0) {
        av_log(s->avctx, AV_LOG_ERROR, "SAC not supported\n");
        return -1;      /* SAC: off */
    }
    s->obmc= get_bits1(&s->gb);
    s->unrestricted_mv = s->obmc || s->h263_long_vectors;
    s->pb_frame = get_bits1(&s->gb);

    if (format < 6) {
        s->width = ff_h263_format[format][0];
        s->height = ff_h263_format[format][1];
        s->avctx->sample_aspect_ratio.num = 12;
        s->avctx->sample_aspect_ratio.den = 11;
    } else {
        format = get_bits(&s->gb, 3);
        if(format == 0 || format == 7){
            av_log(s->avctx, AV_LOG_ERROR, "Wrong Intel H263 format\n");
            return -1;
        }
        if(get_bits(&s->gb, 2))
            av_log(s->avctx, AV_LOG_ERROR, "Bad value for reserved field\n");
        s->loop_filter = get_bits1(&s->gb) * !s->avctx->lowres;
        if(get_bits1(&s->gb))
            av_log(s->avctx, AV_LOG_ERROR, "Bad value for reserved field\n");
        if(get_bits1(&s->gb))
            s->pb_frame = 2;
        if(get_bits(&s->gb, 5))
            av_log(s->avctx, AV_LOG_ERROR, "Bad value for reserved field\n");
        if(get_bits(&s->gb, 5) != 1)
            av_log(s->avctx, AV_LOG_ERROR, "Invalid marker\n");
    }
    if(format == 6){
        int ar = get_bits(&s->gb, 4);
        skip_bits(&s->gb, 9); // display width
        skip_bits1(&s->gb);
        skip_bits(&s->gb, 9); // display height
        if(ar == 15){
            s->avctx->sample_aspect_ratio.num = get_bits(&s->gb, 8); // aspect ratio - width
            s->avctx->sample_aspect_ratio.den = get_bits(&s->gb, 8); // aspect ratio - height
        } else {
            s->avctx->sample_aspect_ratio = ff_h263_pixel_aspect[ar];
        }
        if (s->avctx->sample_aspect_ratio.num == 0)
            av_log(s->avctx, AV_LOG_ERROR, "Invalid aspect ratio.\n");
    }

    s->chroma_qscale= s->qscale = get_bits(&s->gb, 5);
    skip_bits1(&s->gb); /* Continuous Presence Multipoint mode: off */

    if(s->pb_frame){
        skip_bits(&s->gb, 3); //temporal reference for B-frame
        skip_bits(&s->gb, 2); //dbquant
    }

    /* PEI */
    while (get_bits1(&s->gb) != 0) {
        skip_bits(&s->gb, 8);
    }
    s->f_code = 1;

    s->y_dc_scale_table=
    s->c_dc_scale_table= ff_mpeg1_dc_scale_table;

    ff_h263_show_pict_info(s);

    return 0;
}
Example #6
static int mjpegb_decode_frame(AVCodecContext *avctx,
                              void *data, int *got_frame,
                              AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    MJpegDecodeContext *s = avctx->priv_data;
    const uint8_t *buf_end, *buf_ptr;
    GetBitContext hgb; /* for the header */
    uint32_t dqt_offs, dht_offs, sof_offs, sos_offs, second_field_offs;
    uint32_t field_size, sod_offs;
    int ret;

    buf_ptr = buf;
    buf_end = buf + buf_size;
    s->got_picture = 0;

read_header:
    /* reset on every SOI */
    s->restart_interval = 0;
    s->restart_count = 0;
    s->mjpb_skiptosod = 0;

    if (buf_end - buf_ptr >= 1 << 28)
        return AVERROR_INVALIDDATA;

    init_get_bits(&hgb, buf_ptr, /*buf_size*/(buf_end - buf_ptr)*8);

    skip_bits(&hgb, 32); /* reserved zeros */

    if (get_bits_long(&hgb, 32) != MKBETAG('m','j','p','g')) {
        av_log(avctx, AV_LOG_WARNING, "not mjpeg-b (bad fourcc)\n");
        return AVERROR_INVALIDDATA;
    }

    field_size = get_bits_long(&hgb, 32); /* field size */
    av_log(avctx, AV_LOG_DEBUG, "field size: 0x%"PRIx32"\n", field_size);
    skip_bits(&hgb, 32); /* padded field size */
    second_field_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "second_field_offs is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "second field offs: 0x%"PRIx32"\n",
           second_field_offs);

    dqt_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "dqt is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "dqt offs: 0x%"PRIx32"\n", dqt_offs);
    if (dqt_offs) {
        init_get_bits(&s->gb, buf_ptr+dqt_offs, (buf_end - (buf_ptr+dqt_offs))*8);
        s->start_code = DQT;
        ret = ff_mjpeg_decode_dqt(s);
        if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE))
            return ret;
    }

    dht_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "dht is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "dht offs: 0x%"PRIx32"\n", dht_offs);
    if (dht_offs) {
        init_get_bits(&s->gb, buf_ptr+dht_offs, (buf_end - (buf_ptr+dht_offs))*8);
        s->start_code = DHT;
        ff_mjpeg_decode_dht(s);
    }

    sof_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sof is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sof offs: 0x%"PRIx32"\n", sof_offs);
    if (sof_offs) {
        init_get_bits(&s->gb, buf_ptr+sof_offs, (buf_end - (buf_ptr+sof_offs))*8);
        s->start_code = SOF0;
        if ((ret = ff_mjpeg_decode_sof(s)) < 0)
            return ret;
    }

    sos_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sos is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sos offs: 0x%"PRIx32"\n", sos_offs);
    sod_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sof is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sod offs: 0x%"PRIx32"\n", sod_offs);
    if (sos_offs) {
        init_get_bits(&s->gb, buf_ptr + sos_offs,
                      8 * FFMIN(field_size, buf_end - buf_ptr - sos_offs));
        s->mjpb_skiptosod = (sod_offs - sos_offs - show_bits(&s->gb, 16));
        s->start_code = SOS;
        ret = ff_mjpeg_decode_sos(s, NULL, 0, NULL);
        if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE))
            return ret;
    }

    if (s->interlaced) {
        s->bottom_field ^= 1;
        /* if not bottom field, do not output image yet */
        if (s->bottom_field != s->interlace_polarity && second_field_offs) {
            buf_ptr = buf + second_field_offs;
            goto read_header;
        }
    }

    //XXX FIXME factorize, this looks very similar to the EOI code

    if(!s->got_picture) {
        av_log(avctx, AV_LOG_WARNING, "no picture\n");
        return buf_size;
    }

    if ((ret = av_frame_ref(data, s->picture_ptr)) < 0)
        return ret;
    *got_frame = 1;

    if (!s->lossless && avctx->debug & FF_DEBUG_QP) {
        av_log(avctx, AV_LOG_DEBUG, "QP: %d\n",
               FFMAX3(s->qscale[0], s->qscale[1], s->qscale[2]));
    }

    return buf_size;
}
Example #7
static AVPictureType parse_picture_type(const uint8_t *buf, int buflen, CVC1HeaderParser *vc1Header)
{
  AVPictureType pictype = AV_PICTURE_TYPE_NONE;
  int skipped = 0;
  const BYTE *framestart = buf;
  if (IS_MARKER(AV_RB32(buf))) {
    framestart = NULL;
    const BYTE *start, *end, *next;
    next = buf;
    for (start = buf, end = buf + buflen; next < end; start = next) {
      if (AV_RB32(start) == VC1_CODE_FRAME) {
        framestart = start + 4;
        break;
      }
      next = find_next_marker(start + 4, end);
    }
  }
  if (framestart) {
    GetBitContext gb;
    init_get_bits(&gb, framestart, (buflen - (framestart-buf))*8);
    if (vc1Header->hdr.profile == PROFILE_ADVANCED) {
      int fcm = PROGRESSIVE;
      if (vc1Header->hdr.interlaced)
        fcm = decode012(&gb);
      if (fcm == ILACE_FIELD) {
        int fptype = get_bits(&gb, 3);
        pictype = (fptype & 2) ? AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_I;
        if (fptype & 4) // B-picture
          pictype = (fptype & 2) ? AV_PICTURE_TYPE_BI : AV_PICTURE_TYPE_B;
      } else {
        switch (get_unary(&gb, 0, 4)) {
        case 0:
            pictype = AV_PICTURE_TYPE_P;
            break;
        case 1:
            pictype = AV_PICTURE_TYPE_B;
            break;
        case 2:
            pictype = AV_PICTURE_TYPE_I;
            break;
        case 3:
            pictype = AV_PICTURE_TYPE_BI;
            break;
        case 4:
            pictype = AV_PICTURE_TYPE_P; // skipped pic
            skipped = 1;
            break;
        }
      }
    } else {
      if (vc1Header->hdr.finterp)
        skip_bits1(&gb);
      skip_bits(&gb, 2); // framecnt
      if (vc1Header->hdr.rangered)
        skip_bits1(&gb);
      int pic = get_bits1(&gb);
      if (vc1Header->hdr.bframes) {
        if (!pic) {
          if (get_bits1(&gb)) {
            pictype = AV_PICTURE_TYPE_I;
          } else {
            pictype = AV_PICTURE_TYPE_B;
          }
        } else {
          pictype = AV_PICTURE_TYPE_P;
        }
      } else {
        pictype = pic ? AV_PICTURE_TYPE_P : AV_PICTURE_TYPE_I;
      }
    }
  }
  return pictype;
}
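For advanced-profile field pairs the function above maps a 3-bit fptype to a picture type: bit 2 marks a B/BI pair, bit 1 then picks between the two alternatives. A tiny sketch of that mapping with a made-up fptype value:

#include <stdio.h>

int main(void)
{
    int fptype = 6;                                 /* hypothetical 3-bit value */
    const char *type = (fptype & 2) ? "P" : "I";
    if (fptype & 4)                                 /* B-picture field pair */
        type = (fptype & 2) ? "BI" : "B";
    printf("field pair type: %s\n", type);
    return 0;
}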
Example #8
/* most is hardcoded. should extend to handle all h263 streams */
static int h263_decode_picture_header(unsigned char *b_ptr)
{
//    int i;
        
//    for(i=0;i<16;i++) printf(" %02X",b_ptr[i]); printf("\n");
    
    buffer=b_ptr;
    bufptr=bitcnt=buf=0;

    /* picture header */
    if (get_bits(&s->gb, 22) != 0x20){
	mp_msg(MSGT_DEMUX, MSGL_FATAL, "bad picture header\n");
        return -1;
    }
    skip_bits(&s->gb, 8); /* picture timestamp */

    if (get_bits1(&s->gb) != 1){
	mp_msg(MSGT_DEMUX, MSGL_FATAL, "bad marker\n");
        return -1;	/* marker */
    }
    if (get_bits1(&s->gb) != 0){
	mp_msg(MSGT_DEMUX, MSGL_FATAL, "bad h263 id\n");
        return -1;	/* h263 id */
    }
    skip_bits1(&s->gb);	/* split screen off */
    skip_bits1(&s->gb);	/* camera  off */
    skip_bits1(&s->gb);	/* freeze picture release off */

    format = get_bits(&s->gb, 3);

    if (format != 7) {
        mp_msg(MSGT_DEMUX, MSGL_V, "h263_plus = 0  format = %d\n", format);
        /* H.263v1 */
        width = h263_format[format][0];
        height = h263_format[format][1];
	mp_msg(MSGT_DEMUX, MSGL_V, "%d x %d\n", width, height);
//        if (!width) return -1;

	mp_msg(MSGT_DEMUX, MSGL_V, "pict_type=%d\n", get_bits1(&s->gb));
	mp_msg(MSGT_DEMUX, MSGL_V, "unrestricted_mv=%d\n", get_bits1(&s->gb));
#if 1
	mp_msg(MSGT_DEMUX, MSGL_V, "SAC: %d\n", get_bits1(&s->gb));
	mp_msg(MSGT_DEMUX, MSGL_V, "advanced prediction mode: %d\n", get_bits1(&s->gb));
	mp_msg(MSGT_DEMUX, MSGL_V, "PB frame: %d\n", get_bits1(&s->gb));
#else
        if (get_bits1(&s->gb) != 0)
            return -1;	/* SAC: off */
        if (get_bits1(&s->gb) != 0)
            return -1;	/* advanced prediction mode: off */
        if (get_bits1(&s->gb) != 0)
            return -1;	/* not PB frame */
#endif
	mp_msg(MSGT_DEMUX, MSGL_V, "qscale=%d\n", get_bits(&s->gb, 5));
        skip_bits1(&s->gb);	/* Continuous Presence Multipoint mode: off */
    } else {
        mp_msg(MSGT_DEMUX, MSGL_V, "h263_plus = 1\n");
        /* H.263v2 */
        if (get_bits(&s->gb, 3) != 1){
	    mp_msg(MSGT_DEMUX, MSGL_FATAL, "H.263v2 A error\n");
            return -1;
	}
        if (get_bits(&s->gb, 3) != 6){ /* custom source format */
	    mp_msg(MSGT_DEMUX, MSGL_FATAL, "custom source format\n");
            return -1;
	}
        skip_bits(&s->gb, 12);
        skip_bits(&s->gb, 3);
	mp_msg(MSGT_DEMUX, MSGL_V, "pict_type=%d\n", get_bits(&s->gb, 3) + 1);
//        if (s->pict_type != I_TYPE &&
//            s->pict_type != P_TYPE)
//            return -1;
        skip_bits(&s->gb, 7);
        skip_bits(&s->gb, 4); /* aspect ratio */
        width = (get_bits(&s->gb, 9) + 1) * 4;
        skip_bits1(&s->gb);
        height = get_bits(&s->gb, 9) * 4;
	mp_msg(MSGT_DEMUX, MSGL_V, "%d x %d\n", width, height);
        //if (height == 0)
        //    return -1;
	mp_msg(MSGT_DEMUX, MSGL_V, "qscale=%d\n", get_bits(&s->gb, 5));
    }

    /* PEI */
    while (get_bits1(&s->gb) != 0) {
        skip_bits(&s->gb, 8);
    }
//    s->f_code = 1;
//    s->width = width;
//    s->height = height;
    return 0;
}
Example #9
static int read_stream_mux_config(struct LATMContext *latmctx,
                                  GetBitContext *gb)
{
    int ret, audio_mux_version = get_bits(gb, 1);

    latmctx->audio_mux_version_A = 0;
    if (audio_mux_version)
        latmctx->audio_mux_version_A = get_bits(gb, 1);

    if (!latmctx->audio_mux_version_A) {

        if (audio_mux_version)
            latm_get_value(gb);                 // taraFullness

        skip_bits(gb, 1);                       // allStreamSameTimeFraming
        skip_bits(gb, 6);                       // numSubFrames
        // numPrograms
        if (get_bits(gb, 4)) {                  // numPrograms
            avpriv_request_sample(latmctx->aac_ctx.avctx, "Multiple programs");
            return AVERROR_PATCHWELCOME;
        }

        // for each program (which there is only one in DVB)

        // for each layer (which there is only one in DVB)
        if (get_bits(gb, 3)) {                   // numLayer
            avpriv_request_sample(latmctx->aac_ctx.avctx, "Multiple layers");
            return AVERROR_PATCHWELCOME;
        }

        // for all but first stream: use_same_config = get_bits(gb, 1);
        if (!audio_mux_version) {
            if ((ret = latm_decode_audio_specific_config(latmctx, gb, 0)) < 0)
                return ret;
        } else {
            int ascLen = latm_get_value(gb);
            if ((ret = latm_decode_audio_specific_config(latmctx, gb, ascLen)) < 0)
                return ret;
            ascLen -= ret;
            skip_bits_long(gb, ascLen);
        }

        latmctx->frame_length_type = get_bits(gb, 3);
        switch (latmctx->frame_length_type) {
        case 0:
            skip_bits(gb, 8);       // latmBufferFullness
            break;
        case 1:
            latmctx->frame_length = get_bits(gb, 9);
            break;
        case 3:
        case 4:
        case 5:
            skip_bits(gb, 6);       // CELP frame length table index
            break;
        case 6:
        case 7:
            skip_bits(gb, 1);       // HVXC frame length table index
            break;
        }

        if (get_bits(gb, 1)) {                  // other data
            if (audio_mux_version) {
                latm_get_value(gb);             // other_data_bits
            } else {
                int esc;
                do {
                    esc = get_bits(gb, 1);
                    skip_bits(gb, 8);
                } while (esc);
            }
        }

        if (get_bits(gb, 1))                     // crc present
            skip_bits(gb, 8);                    // config_crc
    }

    return 0;
}
Example #10
File: ituh263dec.c  Project: cxxg/FFmpeg
/**
 * read the next MVs for OBMC. yes, this is an ugly hack, feel free to send a patch :)
 */
static void preview_obmc(MpegEncContext *s){
    GetBitContext gb= s->gb;

    int cbpc, i, pred_x, pred_y, mx, my;
    int16_t *mot_val;
    const int xy= s->mb_x + 1 + s->mb_y * s->mb_stride;
    const int stride= s->b8_stride*2;

    for(i=0; i<4; i++)
        s->block_index[i]+= 2;
    for(i=4; i<6; i++)
        s->block_index[i]+= 1;
    s->mb_x++;

    av_assert2(s->pict_type == AV_PICTURE_TYPE_P);

    do{
        if (get_bits1(&s->gb)) {
            /* skip mb */
            mot_val = s->current_picture.motion_val[0][s->block_index[0]];
            mot_val[0       ]= mot_val[2       ]=
            mot_val[0+stride]= mot_val[2+stride]= 0;
            mot_val[1       ]= mot_val[3       ]=
            mot_val[1+stride]= mot_val[3+stride]= 0;

            s->current_picture.mb_type[xy] = MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
            goto end;
        }
        cbpc = get_vlc2(&s->gb, ff_h263_inter_MCBPC_vlc.table, INTER_MCBPC_VLC_BITS, 2);
    }while(cbpc == 20);

    if(cbpc & 4){
        s->current_picture.mb_type[xy] = MB_TYPE_INTRA;
    }else{
        get_vlc2(&s->gb, ff_h263_cbpy_vlc.table, CBPY_VLC_BITS, 1);
        if (cbpc & 8) {
            if(s->modified_quant){
                if(get_bits1(&s->gb)) skip_bits(&s->gb, 1);
                else                  skip_bits(&s->gb, 5);
            }else
                skip_bits(&s->gb, 2);
        }

        if ((cbpc & 16) == 0) {
                s->current_picture.mb_type[xy] = MB_TYPE_16x16 | MB_TYPE_L0;
                /* 16x16 motion prediction */
                mot_val= ff_h263_pred_motion(s, 0, 0, &pred_x, &pred_y);
                if (s->umvplus)
                   mx = h263p_decode_umotion(s, pred_x);
                else
                   mx = ff_h263_decode_motion(s, pred_x, 1);

                if (s->umvplus)
                   my = h263p_decode_umotion(s, pred_y);
                else
                   my = ff_h263_decode_motion(s, pred_y, 1);

                mot_val[0       ]= mot_val[2       ]=
                mot_val[0+stride]= mot_val[2+stride]= mx;
                mot_val[1       ]= mot_val[3       ]=
                mot_val[1+stride]= mot_val[3+stride]= my;
        } else {
            s->current_picture.mb_type[xy] = MB_TYPE_8x8 | MB_TYPE_L0;
            for(i=0;i<4;i++) {
                mot_val = ff_h263_pred_motion(s, i, 0, &pred_x, &pred_y);
                if (s->umvplus)
                  mx = h263p_decode_umotion(s, pred_x);
                else
                  mx = ff_h263_decode_motion(s, pred_x, 1);

                if (s->umvplus)
                  my = h263p_decode_umotion(s, pred_y);
                else
                  my = ff_h263_decode_motion(s, pred_y, 1);
                if (s->umvplus && (mx - pred_x) == 1 && (my - pred_y) == 1)
                  skip_bits1(&s->gb); /* Bit stuffing to prevent PSC */
                mot_val[0] = mx;
                mot_val[1] = my;
            }
        }
    }
end:

    for(i=0; i<4; i++)
        s->block_index[i]-= 2;
    for(i=4; i<6; i++)
        s->block_index[i]-= 1;
    s->mb_x--;

    s->gb= gb;
}
Example #11
/**
 * decodes a macroblock
 * @return <0 if an error occurred
 */
static int h261_decode_block(H261Context * h, DCTELEM * block,
                             int n, int coded)
{
    MpegEncContext * const s = &h->s;
    int code, level, i, j, run;
    RLTable *rl = &h261_rl_tcoeff;
    const uint8_t *scan_table;

    // For the variable length encoding there are two code tables, one being used for
    // the first transmitted LEVEL in INTER, INTER+MC and INTER+MC+FIL blocks, the second
    // for all other LEVELs except the first one in INTRA blocks which is fixed length
    // coded with 8 bits.
    // NOTE: the two code tables only differ in one VLC so we handle that manually.
    scan_table = s->intra_scantable.permutated;
    if (s->mb_intra){
        /* DC coef */
        level = get_bits(&s->gb, 8);
        // 0 (00000000b) and -128 (10000000b) are FORBIDDEN
        if((level&0x7F) == 0){
            av_log(s->avctx, AV_LOG_ERROR, "illegal dc %d at %d %d\n", level, s->mb_x, s->mb_y);
            return -1;
        }
        // The code 1000 0000 is not used, the reconstruction level of 1024 being coded as 1111 1111.
        if (level == 255)
            level = 128;
        block[0] = level;
        i = 1;
    }else if(coded){
        // Run  Level   Code
        // EOB                  Not possible for first level when cbp is available (that's why the table is different)
        // 0    1               1s
        // *    *               0*
        int check = show_bits(&s->gb, 2);
        i = 0;
        if ( check & 0x2 ){
            skip_bits(&s->gb, 2);
            block[0] = ( check & 0x1 ) ? -1 : 1;
            i = 1;
        }
    }else{
        i = 0;
    }
    if(!coded){
        s->block_last_index[n] = i - 1;
        return 0;
    }
    for(;;){
        code = get_vlc2(&s->gb, rl->vlc.table, TCOEFF_VLC_BITS, 2);
        if (code < 0){
            av_log(s->avctx, AV_LOG_ERROR, "illegal ac vlc code at %dx%d\n", s->mb_x, s->mb_y);
            return -1;
        }
        if (code == rl->n) {
            /* escape */
            // The remaining combinations of (run, level) are encoded with a 20-bit word consisting of 6 bits escape, 6 bits run and 8 bits level.
            run = get_bits(&s->gb, 6);
            level = get_sbits(&s->gb, 8);
        }else if(code == 0){
            break;
        }else{
            run = rl->table_run[code];
            level = rl->table_level[code];
            if (get_bits1(&s->gb))
                level = -level;
        }
        i += run;
        if (i >= 64){
            av_log(s->avctx, AV_LOG_ERROR, "run overflow at %dx%d\n", s->mb_x, s->mb_y);
            return -1;
        }
        j = scan_table[i];
        block[j] = level;
        i++;
    }
    s->block_last_index[n] = i-1;
    return 0;
}
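The escape path above consumes a 20-bit word: 6 bits of escape code, 6 bits of run and an 8-bit two's-complement level. A standalone sketch of unpacking run and level from such a word; the word itself is a made-up value rather than bits taken from a real stream:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint32_t word = (0x01u << 14) | (5u << 8) | 0xfeu;  /* escape=000001, run=5, level=-2 */
    int run   = (word >> 8) & 0x3f;                     /* middle 6 bits                  */
    int level = (int8_t)(word & 0xff);                  /* signed 8-bit level             */

    printf("run=%d level=%d\n", run, level);
    return 0;
}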
Example #12
// Return TRUE if the data parsing finished, FALSE otherwise.
// esstream->pos is advanced. Data is only processed if esstream->error
// is FALSE; parsing can set esstream->error to TRUE.
static int gop_header(struct bitstream *esstream)
{
    dbg_print(DMT_VERBOSE, "GOP header\n");

    if (esstream->error || esstream->bitsleft <= 0)
        return 0;

    // We only get here after seeing that start code
    if (read_u32(esstream) != 0xB8010000) // LSB first (0x000001B8)
        fatal(EXIT_BUG_BUG, "Impossible!");

    unsigned drop_frame_flag = (unsigned) read_bits(esstream,1);
    struct gop_time_code gtc;
    gtc.time_code_hours = (int) read_bits(esstream,5);
    gtc.time_code_minutes = (int) read_bits(esstream,6);
    skip_bits(esstream,1); // Marker bit
    gtc.time_code_seconds = (int) read_bits(esstream,6);
    gtc.time_code_pictures = (int) read_bits(esstream,6);
    gtc.inited = 1;
    calculate_ms_gop_time(&gtc);

    if (esstream->bitsleft < 0)
        return 0;

    if (gop_accepted(&gtc))
    {
        // Do GOP padding during GOP header. The previous GOP and all
        // included captions are written. Use the current GOP time to
        // do the padding.

        // Flush buffered cc blocks before doing the housekeeping
        if (has_ccdata_buffered)
        {
            process_hdcc();
        }

        // Last GOPs pulldown frames
        if ((current_pulldownfields>0) != (pulldownfields>0))
        {
            current_pulldownfields = pulldownfields;
            dbg_print(DMT_VERBOSE, "Pulldown: %s", (pulldownfields ? "on" : "off"));
            if (pulldownfields)
                dbg_print(DMT_VERBOSE, " - %u fields in last GOP", pulldownfields);
            dbg_print(DMT_VERBOSE, "\n");
        }
        pulldownfields = 0;

        // Report synchronization jumps between GOPs. Warn if there
        // are 20% or more deviation.
        if ( (debug_mask & DMT_TIME)
             && ((gtc.ms - gop_time.ms // more than 20% longer
                  > frames_since_last_gop*1000.0/current_fps*1.2)
                 ||
                 (gtc.ms - gop_time.ms // or 20% shorter
                  < frames_since_last_gop*1000.0/current_fps*0.8))
             && first_gop_time.inited )
        {
            mprint("\rWarning: Jump in GOP timing.\n");
            mprint("  (old) %s",
                   print_mstime(gop_time.ms));
            mprint("  +  %s (%uF)",
                   print_mstime(LLONG(frames_since_last_gop
                                      *1000.0/current_fps)),
                   frames_since_last_gop);
            mprint("  !=  (new) %s\n",
                   print_mstime(gtc.ms));
        }

        if (first_gop_time.inited == 0)
        {
            first_gop_time = gtc;

            // It needs to be "+1" because the frame count starts at 0 and we
            // need the length of all frames.
            if ( total_frames_count == 0 )
            {   // If this is the first frame there cannot be an offset
                fts_fc_offset = 0;
                // first_gop_time.ms stays unchanged
            }
            else
            {
                fts_fc_offset = LLONG((total_frames_count+1)
                                      *1000.0/current_fps);
                // Compensate for those written before
                first_gop_time.ms -= fts_fc_offset;
            }

            dbg_print(DMT_TIME, "\nFirst GOP time: %02u:%02u:%02u:%03u %+lldms\n",
                    gtc.time_code_hours,
                    gtc.time_code_minutes, gtc.time_code_seconds,
                    unsigned(1000.0*gtc.time_code_pictures/current_fps),
                    fts_fc_offset);
        }

        gop_time = gtc;

        frames_since_last_gop=0;
        // Indicate that we read a gop header (since last frame number 0)
        saw_gop_header = 1;

        // If we use GOP timing, reconstruct the PTS from the GOP
        if (use_gop_as_pts)
        {
            current_pts = gtc.ms*(MPEG_CLOCK_FREQ/1000);
            if (pts_set==0)
                pts_set=1;
            current_tref = 0;
            frames_since_ref_time = 0;
            set_fts();
            fts_at_gop_start = get_fts_max();
        }
        else
        {
            // FIXME: Wrong when PTS are not increasing but are identical
            // throughout the GOP and then jump to the next time for the
            // next GOP.
            // This effect will also lead to captions being one GOP early
            // for DVD captions.
            fts_at_gop_start = get_fts_max() + LLONG(1000.0/current_fps);
        }

        if (debug_mask & DMT_TIME)
        {
            dbg_print(DMT_TIME, "\nNew GOP:\n");
            dbg_print(DMT_TIME, "\nDrop frame flag: %u:\n", drop_frame_flag);
            print_debug_timing();
        }
    }

    return 1;
}
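The GOP time code read above is hours:minutes:seconds:pictures. calculate_ms_gop_time() itself is not part of the snippet, so the following is only an illustrative standalone conversion of those fields to milliseconds at a given frame rate, not CCExtractor's exact routine:

#include <stdio.h>

int main(void)
{
    int hours = 0, minutes = 12, seconds = 34, pictures = 5;   /* made-up time code */
    double fps = 29.97;

    long long ms = ((hours * 60LL + minutes) * 60 + seconds) * 1000
                 + (long long)(pictures * 1000.0 / fps);
    printf("GOP time: %lld ms\n", ms);
    return 0;
}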
Example #13
static int decode_picture_timing(H264Context *h)
{
    SPS *sps = &h->sps;
    int i;

    for (i = 0; i<MAX_SPS_COUNT; i++)
        if (!sps->log2_max_frame_num && h->sps_buffers[i])
            sps = h->sps_buffers[i];

    if (sps->nal_hrd_parameters_present_flag || sps->vcl_hrd_parameters_present_flag) {
        h->sei_cpb_removal_delay = get_bits_long(&h->gb,
                                                 sps->cpb_removal_delay_length);
        h->sei_dpb_output_delay  = get_bits_long(&h->gb,
                                                 sps->dpb_output_delay_length);
    }
    if (sps->pic_struct_present_flag) {
        unsigned int i, num_clock_ts;

        h->sei_pic_struct = get_bits(&h->gb, 4);
        h->sei_ct_type    = 0;

        if (h->sei_pic_struct > SEI_PIC_STRUCT_FRAME_TRIPLING)
            return AVERROR_INVALIDDATA;

        num_clock_ts = sei_num_clock_ts_table[h->sei_pic_struct];

        for (i = 0; i < num_clock_ts; i++) {
            if (get_bits(&h->gb, 1)) {                /* clock_timestamp_flag */
                unsigned int full_timestamp_flag;

                h->sei_ct_type |= 1 << get_bits(&h->gb, 2);
                skip_bits(&h->gb, 1);                 /* nuit_field_based_flag */
                skip_bits(&h->gb, 5);                 /* counting_type */
                full_timestamp_flag = get_bits(&h->gb, 1);
                skip_bits(&h->gb, 1);                 /* discontinuity_flag */
                skip_bits(&h->gb, 1);                 /* cnt_dropped_flag */
                skip_bits(&h->gb, 8);                 /* n_frames */
                if (full_timestamp_flag) {
                    skip_bits(&h->gb, 6);             /* seconds_value 0..59 */
                    skip_bits(&h->gb, 6);             /* minutes_value 0..59 */
                    skip_bits(&h->gb, 5);             /* hours_value 0..23 */
                } else {
                    if (get_bits(&h->gb, 1)) {        /* seconds_flag */
                        skip_bits(&h->gb, 6);         /* seconds_value range 0..59 */
                        if (get_bits(&h->gb, 1)) {    /* minutes_flag */
                            skip_bits(&h->gb, 6);     /* minutes_value 0..59 */
                            if (get_bits(&h->gb, 1))  /* hours_flag */
                                skip_bits(&h->gb, 5); /* hours_value 0..23 */
                        }
                    }
                }
                if (sps->time_offset_length > 0)
                    skip_bits(&h->gb,
                              sps->time_offset_length); /* time_offset */
            }
        }

        if (h->avctx->debug & FF_DEBUG_PICT_INFO)
            av_log(h->avctx, AV_LOG_DEBUG, "ct_type:%X pic_struct:%d\n",
                   h->sei_ct_type, h->sei_pic_struct);
    }
    return 0;
}
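num_clock_ts above comes from a table lookup on sei_pic_struct; the table itself is not part of the snippet. A sketch using the NumClockTS values of H.264 Table D-1, where frame, field and doubling/tripling variants map to 1, 2 or 3 timestamps:

#include <stdio.h>

int main(void)
{
    /* NumClockTS for pic_struct 0..8, per H.264 Table D-1 */
    static const int num_clock_ts_table[9] = { 1, 1, 1, 2, 2, 3, 3, 2, 3 };
    int sei_pic_struct = 3;                    /* hypothetical: top field + bottom field */

    printf("num_clock_ts = %d\n", num_clock_ts_table[sei_pic_struct]);
    return 0;
}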
Example #14
// Return TRUE if the data parsing finished, FALSE otherwise.
// esstream->pos is advanced. Data is only processed if esstream->error
// is FALSE; parsing can set esstream->error to TRUE.
static int sequence_header(struct bitstream *esstream)
{
    dbg_print(DMT_VERBOSE, "Sequence header\n");

    if (esstream->error || esstream->bitsleft <= 0)
        return 0;

    // We only get here after seeing that start code
    if (read_u32(esstream) != 0xB3010000) // LSB first (0x000001B3)
        fatal(EXIT_BUG_BUG, "Impossible!");

    unsigned hor_size = (unsigned) read_bits(esstream,12);
    unsigned vert_size = (unsigned) read_bits(esstream,12);
    unsigned aspect_ratio = (unsigned) read_bits(esstream,4);
    unsigned frame_rate = (unsigned) read_bits(esstream,4);

    // Discard some information
    read_bits(esstream, 18+1+10+1);

    // load_intra_quantiser_matrix
    if (read_bits(esstream,1))
        skip_bits(esstream, 8*64);
    // load_non_intra_quantiser_matrix
    if (read_bits(esstream,1))
        skip_bits(esstream, 8*64);

    if (esstream->bitsleft < 0)
        return 0;

    // If we got the whole sequence, process
    if (hor_size!=current_hor_size ||
        vert_size!=current_vert_size ||
        aspect_ratio!=current_aspect_ratio ||
        frame_rate!=current_frame_rate)
    {
        // If horizontal/vertical size, framerate and/or aspect
        // ratio are illegal, we discard the
        // whole sequence info.
        if (vert_size >= 288 && vert_size <= 1088 && 
            hor_size >= 352 && hor_size <= 1920 &&
            hor_size / vert_size >= 352/576 && hor_size / vert_size <= 2 &&
            frame_rate>0 && frame_rate<9 &&
            aspect_ratio>0 && aspect_ratio<5)
        {
            mprint ("\n\nNew video information found");
            if (pts_set==2)
            {
                unsigned cur_sec = (unsigned) ((current_pts - min_pts)
                                               / MPEG_CLOCK_FREQ);
                mprint (" at %02u:%02u",cur_sec/60, cur_sec % 60);
            }
            mprint ("\n");                                          
            mprint ("[%u * %u] [AR: %s] [FR: %s]",
                    hor_size,vert_size,
                    aspect_ratio_types[aspect_ratio],
                    framerates_types[frame_rate]);
            // No newline, force the output of progressive info in picture
            // info part.
            current_progressive_sequence = 2;

            current_hor_size=hor_size;
            current_vert_size=vert_size;
            current_aspect_ratio=aspect_ratio;
            current_frame_rate=frame_rate;
            current_fps = framerates_values[current_frame_rate];
            activity_video_info (hor_size,vert_size,
                                 aspect_ratio_types[aspect_ratio],
                                 framerates_types[frame_rate]);
        } 
        else 
        {
            dbg_print(DMT_VERBOSE, "\nInvalid sequence header:\n");
            dbg_print(DMT_VERBOSE, "V: %u H: %u FR: %u AS: %u\n",
                   vert_size, hor_size, frame_rate, aspect_ratio);
            esstream->error = 1;
            return 0;
        }
    }

    // Read complete
    return 1;
}
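The first 32 bits after the sequence_header start code are a 12-bit width, a 12-bit height, a 4-bit aspect ratio code and a 4-bit frame rate code, exactly as the reads above consume them. A standalone sketch of that unpacking from four raw bytes; the byte values encode a made-up 720x576 example:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
    uint8_t b[4] = { 0x2d, 0x02, 0x40, 0x33 };            /* 720x576, AR code 3, FR code 3 */
    unsigned hor_size     = (b[0] << 4) | (b[1] >> 4);    /* 12 bits */
    unsigned vert_size    = ((b[1] & 0x0f) << 8) | b[2];  /* 12 bits */
    unsigned aspect_ratio = b[3] >> 4;                    /* 4 bits  */
    unsigned frame_rate   = b[3] & 0x0f;                  /* 4 bits  */

    printf("%ux%u AR=%u FR=%u\n", hor_size, vert_size, aspect_ratio, frame_rate);
    return 0;
}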
Example #15
// Return TRUE if the video sequence was finished, FALSE
// otherwise. esstream->pos shall point to the position where
// the next call will continue, i.e. the possible beginning of an
// unfinished video sequence or after the finished sequence.
static int es_video_sequence(struct bitstream *esstream)
{
    // Avoid "Skip forward" message on first call and later only
    // once per search.
    static int noskipmessage = 1;
    uint8_t startcode;

    dbg_print(DMT_VERBOSE, "es_video_sequence()\n");

    esstream->error = 0;

    // Analyze sequence header ...
    if (!no_bitstream_error)
    {
        // We might start here because of a syntax error. Discard
        // all data until a new sequence_header_code or group_start_code
        // is found.

        if (!noskipmessage) // Avoid unnecessary output.
            mprint("\nSkip forward to the next Sequence or GOP start.\n");
        else
            noskipmessage = 0;

        uint8_t startcode;
        while(1)
        {
            // search_start_code() cannot produce esstream->error
            startcode = search_start_code(esstream);
            if (esstream->bitsleft < 0)
            {
                noskipmessage = 1;
                return 0;
            }

            if (startcode == 0xB3 || startcode == 0xB8) // found it
                break;

            skip_bits(esstream, 4*8);
        }

        no_bitstream_error = 1;
        saw_seqgoppic = 0;
        in_pic_data = 0;
    }

    do
    {
        startcode = next_start_code(esstream);

        dbg_print(DMT_VERBOSE, "\nM2V - next start code %02X %d\n", startcode, in_pic_data);

        // Syntax check - also returns on bitsleft < 0
        if (startcode == 0xB4)
        {
            if (esstream->error)
            {
                no_bitstream_error = 0;
                dbg_print(DMT_VERBOSE, "es_video_sequence: syntax problem.\n");
            }

            dbg_print(DMT_VERBOSE, "es_video_sequence: return on B4 startcode.\n");

            return 0;
        }

        // Sequence_end_code
        if (startcode == 0xB7)
        {
            read_u32(esstream); // Advance bitstream
            no_bitstream_error = 0;
            break;
        }

        if (!in_pic_data && startcode == 0xB3)
        {
            if (!read_seq_info(esstream))
            {
                if (esstream->error)
                    no_bitstream_error = 0;
                return 0;
            }
            saw_seqgoppic = 1;
            continue;
        }

        if (!in_pic_data && startcode == 0xB8)
        {
            if (!read_gop_info(esstream))
            {
                if (esstream->error)
                    no_bitstream_error = 0;
                return 0;
            }
            saw_seqgoppic = 2;
            continue;
        }

        if (!in_pic_data && startcode == 0x00)
        {
            if (!read_pic_info(esstream))
            {
                if (esstream->error)
                    no_bitstream_error = 0;
                return 0;
            }
            saw_seqgoppic = 3;
            in_pic_data = 1;
            continue;
        }

        // Only looks for extension and user data if we saw sequence, gop
        // or picture info before.
        // This check needs to be before the "in_pic_data" part.
        if ( saw_seqgoppic && (startcode == 0xB2 || startcode == 0xB5))
        {
            if (!read_eau_info(esstream, saw_seqgoppic-1))
            {
                if (esstream->error)
                    no_bitstream_error = 0;
                return 0;
            }
            saw_seqgoppic = 0;
            continue;
        }

        if (in_pic_data) // See comment in read_pic_data()
        {
            if (!read_pic_data(esstream))
            {
                if (esstream->error)
                    no_bitstream_error = 0;
                return 0;
            }
            saw_seqgoppic = 0;
            in_pic_data = 0;
            continue;
        }

        // Nothing found - bitstream error
        if (startcode == 0xBA)
        {
            mprint("\nFound PACK header in ES data.  Probably wrong stream mode!\n");
        }
        else
        {
            mprint("\nUnexpected startcode: %02X\n", startcode);
        }
        no_bitstream_error = 0;
        return 0;
    }
    while(1);

    return 1;
}
Example #16
static int vp9_raw_reorder_frame_parse(AVBSFContext *bsf, VP9RawReorderFrame *frame)
{
    GetBitContext bc;
    int err;

    unsigned int frame_marker;
    unsigned int profile_low_bit, profile_high_bit, reserved_zero;
    unsigned int error_resilient_mode;
    unsigned int frame_sync_code;

    err = init_get_bits(&bc, frame->packet->data, 8 * frame->packet->size);
    if (err)
        return err;

    frame_marker = get_bits(&bc, 2);
    if (frame_marker != 2) {
        av_log(bsf, AV_LOG_ERROR, "Invalid frame marker: %u.\n",
               frame_marker);
        return AVERROR_INVALIDDATA;
    }

    profile_low_bit  = get_bits1(&bc);
    profile_high_bit = get_bits1(&bc);
    frame->profile = (profile_high_bit << 1) | profile_low_bit;
    if (frame->profile == 3) {
        reserved_zero = get_bits1(&bc);
        if (reserved_zero != 0) {
            av_log(bsf, AV_LOG_ERROR, "Profile reserved_zero bit set: "
                   "unsupported profile or invalid bitstream.\n");
            return AVERROR_INVALIDDATA;
        }
    }

    frame->show_existing_frame = get_bits1(&bc);
    if (frame->show_existing_frame) {
        frame->frame_to_show = get_bits(&bc, 3);
        return 0;
    }

    frame->frame_type = get_bits1(&bc);
    frame->show_frame = get_bits1(&bc);
    error_resilient_mode = get_bits1(&bc);

    if (frame->frame_type == 0) {
        frame_sync_code = get_bits(&bc, 24);
        if (frame_sync_code != 0x498342) {
            av_log(bsf, AV_LOG_ERROR, "Invalid frame sync code: %06x.\n",
                   frame_sync_code);
            return AVERROR_INVALIDDATA;
        }
        frame->refresh_frame_flags = 0xff;
    } else {
        unsigned int intra_only;

        if (frame->show_frame == 0)
            intra_only = get_bits1(&bc);
        else
            intra_only = 0;
        if (error_resilient_mode == 0) {
            // reset_frame_context
            skip_bits(&bc, 2);
        }
        if (intra_only) {
            frame_sync_code = get_bits(&bc, 24);
            if (frame_sync_code != 0x498342) {
                av_log(bsf, AV_LOG_ERROR, "Invalid frame sync code: "
                       "%06x.\n", frame_sync_code);
                return AVERROR_INVALIDDATA;
            }
            if (frame->profile > 0) {
                unsigned int color_space;
                if (frame->profile >= 2) {
                    // ten_or_twelve_bit
                    skip_bits(&bc, 1);
                }
                color_space = get_bits(&bc, 3);
                if (color_space != 7 /* CS_RGB */) {
                    // color_range
                    skip_bits(&bc, 1);
                    if (frame->profile == 1 || frame->profile == 3) {
                        // subsampling
                        skip_bits(&bc, 3);
                    }
                } else {
                    if (frame->profile == 1 || frame->profile == 3)
                        skip_bits(&bc, 1);
                }
            }
            frame->refresh_frame_flags = get_bits(&bc, 8);
        } else {
            frame->refresh_frame_flags = get_bits(&bc, 8);
        }
    }

    return 0;
}
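The VP9 profile above is assembled from two separate bits, low bit first, and profile 3 is followed by an extra reserved_zero bit. A tiny sketch of that packing with hypothetical bit values:

#include <stdio.h>

int main(void)
{
    unsigned profile_low_bit = 1, profile_high_bit = 1;    /* hypothetical bits */
    unsigned profile = (profile_high_bit << 1) | profile_low_bit;

    printf("profile %u%s\n", profile,
           profile == 3 ? " (a reserved_zero bit follows in the stream)" : "");
    return 0;
}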
Example #17
File: dtsdec.c  Project: 63n/FFmpeg
static int dts_probe(AVProbeData *p)
{
    const uint8_t *buf, *bufp;
    uint32_t state = -1;
    int markers[4*16] = {0};
    int sum, max, i;
    int64_t diff = 0;
    uint8_t hdr[12 + FF_INPUT_BUFFER_PADDING_SIZE] = { 0 };

    buf = p->buf + FFMIN(4096, p->buf_size);

    for(; buf < (p->buf+p->buf_size)-2; buf+=2) {
        int marker, sample_blocks, sample_rate, sr_code, framesize;
        int lfe;
        GetBitContext gb;

        bufp = buf;
        state = (state << 16) | bytestream_get_be16(&bufp);

        if (buf - p->buf >= 4)
            diff += FFABS(((int16_t)AV_RL16(buf)) - (int16_t)AV_RL16(buf-4));

        /* regular bitstream */
        if (state == DCA_SYNCWORD_CORE_BE)
            marker = 0;
        else if (state == DCA_SYNCWORD_CORE_LE)
            marker = 1;

        /* 14 bits big-endian bitstream */
        else if (state == DCA_SYNCWORD_CORE_14B_BE &&
                 (bytestream_get_be16(&bufp) & 0xFFF0) == 0x07F0)
            marker = 2;

        /* 14 bits little-endian bitstream */
        else if (state == DCA_SYNCWORD_CORE_14B_LE &&
                 (bytestream_get_be16(&bufp) & 0xF0FF) == 0xF007)
            marker = 3;
        else
            continue;

        if (avpriv_dca_convert_bitstream(buf-2, 12, hdr, 12) < 0)
            continue;

        init_get_bits(&gb, hdr, 96);
        skip_bits_long(&gb, 39);

        sample_blocks = get_bits(&gb, 7) + 1;
        if (sample_blocks < 8)
            continue;

        framesize = get_bits(&gb, 14) + 1;
        if (framesize < 95)
            continue;

        skip_bits(&gb, 6);
        sr_code = get_bits(&gb, 4);
        sample_rate = avpriv_dca_sample_rates[sr_code];
        if (sample_rate == 0)
            continue;

        get_bits(&gb, 5);
        if (get_bits(&gb, 1))
            continue;

        skip_bits_long(&gb, 9);
        lfe = get_bits(&gb, 2);
        if (lfe > 2)
            continue;

        marker += 4* sr_code;

        markers[marker] ++;
    }

    sum = max = 0;
    for (i=0; i<FF_ARRAY_ELEMS(markers); i++) {
        sum += markers[i];
        if (markers[max] < markers[i])
            max = i;
    }

    if (markers[max] > 3 && p->buf_size / markers[max] < 32*1024 &&
        markers[max] * 4 > sum * 3 &&
        diff / p->buf_size > 200)
        return AVPROBE_SCORE_EXTENSION + 1;

    return 0;
}
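The probe above ends by histogramming sync-word hits per (marker, sample-rate code) pair and only returns a score when the dominant marker clearly wins and the buffer does not look like near-silence. A standalone sketch of that final decision with made-up histogram numbers:

#include <stdio.h>

int main(void)
{
    int markers[4 * 16] = { 0 };
    int buf_size = 65536;
    long long diff = 20000000;          /* hypothetical sample-difference accumulator */
    int sum = 0, max = 0, i;

    markers[5] = 40;                    /* pretend marker 5 dominated */
    markers[9] = 3;

    for (i = 0; i < 4 * 16; i++) {
        sum += markers[i];
        if (markers[max] < markers[i])
            max = i;
    }
    if (markers[max] > 3 && buf_size / markers[max] < 32 * 1024 &&
        markers[max] * 4 > sum * 3 && diff / buf_size > 200)
        printf("looks like DTS (marker %d, %d hits)\n", max, markers[max]);
    return 0;
}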
Example #18
File: mpc8.c  Project: KindDragon/FFmpeg
static int mpc8_decode_frame(AVCodecContext * avctx, void *data,
                             int *got_frame_ptr, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    MPCContext *c = avctx->priv_data;
    GetBitContext gb2, *gb = &gb2;
    int i, j, k, ch, cnt, res, t;
    Band *bands = c->bands;
    int off;
    int maxband, keyframe;
    int last[2];

    /* get output buffer */
    c->frame.nb_samples = MPC_FRAME_SIZE;
    if ((res = avctx->get_buffer(avctx, &c->frame)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return res;
    }

    keyframe = c->cur_frame == 0;

    if(keyframe){
        memset(c->Q, 0, sizeof(c->Q));
        c->last_bits_used = 0;
    }
    init_get_bits(gb, buf, buf_size * 8);
    skip_bits(gb, c->last_bits_used & 7);

    if(keyframe)
        maxband = mpc8_get_mod_golomb(gb, c->maxbands + 1);
    else{
        maxband = c->last_max_band + get_vlc2(gb, band_vlc.table, MPC8_BANDS_BITS, 2);
        if(maxband > 32) maxband -= 33;
    }

    if(maxband > c->maxbands + 1) {
        av_log(avctx, AV_LOG_ERROR, "maxband %d too large\n",maxband);
        return AVERROR_INVALIDDATA;
    }
    c->last_max_band = maxband;

    /* read subband indexes */
    if(maxband){
        last[0] = last[1] = 0;
        for(i = maxband - 1; i >= 0; i--){
            for(ch = 0; ch < 2; ch++){
                last[ch] = get_vlc2(gb, res_vlc[last[ch] > 2].table, MPC8_RES_BITS, 2) + last[ch];
                if(last[ch] > 15) last[ch] -= 17;
                bands[i].res[ch] = last[ch];
            }
        }
        if(c->MSS){
            int mask;

            cnt = 0;
            for(i = 0; i < maxband; i++)
                if(bands[i].res[0] || bands[i].res[1])
                    cnt++;
            t = mpc8_get_mod_golomb(gb, cnt);
            mask = mpc8_get_mask(gb, cnt, t);
            for(i = maxband - 1; i >= 0; i--)
                if(bands[i].res[0] || bands[i].res[1]){
                    bands[i].msf = mask & 1;
                    mask >>= 1;
                }
        }
Example #19
static int decode_picture_timing(H264Context *h){
    MpegEncContext * const s = &h->s;
    if(h->sps.nal_hrd_parameters_present_flag || h->sps.vcl_hrd_parameters_present_flag){
        h->sei_cpb_removal_delay = get_bits(&s->gb, h->sps.cpb_removal_delay_length);
        h->sei_dpb_output_delay = get_bits(&s->gb, h->sps.dpb_output_delay_length);
    }
    if(h->sps.pic_struct_present_flag){
        unsigned int i, num_clock_ts;
        h->sei_pic_struct = get_bits(&s->gb, 4);
        h->sei_ct_type    = 0;

        if (h->sei_pic_struct > SEI_PIC_STRUCT_FRAME_TRIPLING)
            return -1;

        num_clock_ts = sei_num_clock_ts_table[h->sei_pic_struct];

        for (i = 0 ; i < num_clock_ts ; i++){
            if(get_bits(&s->gb, 1)){                  /* clock_timestamp_flag */
                unsigned int full_timestamp_flag;
                h->sei_ct_type |= 1<<get_bits(&s->gb, 2);
                skip_bits(&s->gb, 1);                 /* nuit_field_based_flag */
                skip_bits(&s->gb, 5);                 /* counting_type */
                full_timestamp_flag = get_bits(&s->gb, 1);
                skip_bits(&s->gb, 1);                 /* discontinuity_flag */
                skip_bits(&s->gb, 1);                 /* cnt_dropped_flag */
                skip_bits(&s->gb, 8);                 /* n_frames */
                if(full_timestamp_flag){
                    skip_bits(&s->gb, 6);             /* seconds_value 0..59 */
                    skip_bits(&s->gb, 6);             /* minutes_value 0..59 */
                    skip_bits(&s->gb, 5);             /* hours_value 0..23 */
                }else{
                    if(get_bits(&s->gb, 1)){          /* seconds_flag */
                        skip_bits(&s->gb, 6);         /* seconds_value range 0..59 */
                        if(get_bits(&s->gb, 1)){      /* minutes_flag */
                            skip_bits(&s->gb, 6);     /* minutes_value 0..59 */
                            if(get_bits(&s->gb, 1))   /* hours_flag */
                                skip_bits(&s->gb, 5); /* hours_value 0..23 */
                        }
                    }
                }
                if(h->sps.time_offset_length > 0)
                    skip_bits(&s->gb, h->sps.time_offset_length); /* time_offset */
            }
        }

        if(s->avctx->debug & FF_DEBUG_PICT_INFO)
            av_log(s->avctx, AV_LOG_DEBUG, "ct_type:%X pic_struct:%d\n", h->sei_ct_type, h->sei_pic_struct);
    }
    return 0;
}
Example #20
File: aacps.c  Project: Arcen/FFmpeg
int ff_ps_read_data(AVCodecContext *avctx, GetBitContext *gb_host, PSContext *ps, int bits_left)
{
    int e;
    int bit_count_start = get_bits_count(gb_host);
    int header;
    int bits_consumed;
    GetBitContext gbc = *gb_host, *gb = &gbc;

    header = get_bits1(gb);
    if (header) {     //enable_ps_header
        ps->enable_iid = get_bits1(gb);
        if (ps->enable_iid) {
            int iid_mode = get_bits(gb, 3);
            if (iid_mode > 5) {
                av_log(avctx, AV_LOG_ERROR, "iid_mode %d is reserved.\n",
                       iid_mode);
                goto err;
            }
            ps->nr_iid_par    = nr_iidicc_par_tab[iid_mode];
            ps->iid_quant     = iid_mode > 2;
            ps->nr_ipdopd_par = nr_iidopd_par_tab[iid_mode];
        }
        ps->enable_icc = get_bits1(gb);
        if (ps->enable_icc) {
            ps->icc_mode = get_bits(gb, 3);
            if (ps->icc_mode > 5) {
                av_log(avctx, AV_LOG_ERROR, "icc_mode %d is reserved.\n",
                       ps->icc_mode);
                goto err;
            }
            ps->nr_icc_par = nr_iidicc_par_tab[ps->icc_mode];
        }
        ps->enable_ext = get_bits1(gb);
    }

    ps->frame_class = get_bits1(gb);
    ps->num_env_old = ps->num_env;
    ps->num_env     = num_env_tab[ps->frame_class][get_bits(gb, 2)];

    ps->border_position[0] = -1;
    if (ps->frame_class) {
        for (e = 1; e <= ps->num_env; e++)
            ps->border_position[e] = get_bits(gb, 5);
    } else
        for (e = 1; e <= ps->num_env; e++)
            ps->border_position[e] = (e * numQMFSlots >> ff_log2_tab[ps->num_env]) - 1;

    if (ps->enable_iid) {
        for (e = 0; e < ps->num_env; e++) {
            int dt = get_bits1(gb);
            if (read_iid_data(avctx, gb, ps, ps->iid_par, huff_iid[2*dt+ps->iid_quant], e, dt))
                goto err;
        }
    } else
        memset(ps->iid_par, 0, sizeof(ps->iid_par));

    if (ps->enable_icc)
        for (e = 0; e < ps->num_env; e++) {
            int dt = get_bits1(gb);
            if (read_icc_data(avctx, gb, ps, ps->icc_par, dt ? huff_icc_dt : huff_icc_df, e, dt))
                goto err;
        }
    else
        memset(ps->icc_par, 0, sizeof(ps->icc_par));

    if (ps->enable_ext) {
        int cnt = get_bits(gb, 4);
        if (cnt == 15) {
            cnt += get_bits(gb, 8);
        }
        cnt *= 8;
        while (cnt > 7) {
            int ps_extension_id = get_bits(gb, 2);
            cnt -= 2 + ps_read_extension_data(gb, ps, ps_extension_id);
        }
        if (cnt < 0) {
            av_log(avctx, AV_LOG_ERROR, "ps extension overflow %d", cnt);
            goto err;
        }
        skip_bits(gb, cnt);
    }

    ps->enable_ipdopd &= !PS_BASELINE;

    //Fix up envelopes
    if (!ps->num_env || ps->border_position[ps->num_env] < numQMFSlots - 1) {
        //Create a fake envelope
        int source = ps->num_env ? ps->num_env - 1 : ps->num_env_old - 1;
        if (source >= 0 && source != ps->num_env) {
            if (ps->enable_iid) {
                memcpy(ps->iid_par+ps->num_env, ps->iid_par+source, sizeof(ps->iid_par[0]));
            }
            if (ps->enable_icc) {
                memcpy(ps->icc_par+ps->num_env, ps->icc_par+source, sizeof(ps->icc_par[0]));
            }
            if (ps->enable_ipdopd) {
                memcpy(ps->ipd_par+ps->num_env, ps->ipd_par+source, sizeof(ps->ipd_par[0]));
                memcpy(ps->opd_par+ps->num_env, ps->opd_par+source, sizeof(ps->opd_par[0]));
            }
        }
        ps->num_env++;
        ps->border_position[ps->num_env] = numQMFSlots - 1;
    }


    ps->is34bands_old = ps->is34bands;
    if (!PS_BASELINE && (ps->enable_iid || ps->enable_icc))
        ps->is34bands = (ps->enable_iid && ps->nr_iid_par == 34) ||
                        (ps->enable_icc && ps->nr_icc_par == 34);

    //Baseline
    if (!ps->enable_ipdopd) {
        memset(ps->ipd_par, 0, sizeof(ps->ipd_par));
        memset(ps->opd_par, 0, sizeof(ps->opd_par));
    }

    if (header)
        ps->start = 1;

    bits_consumed = get_bits_count(gb) - bit_count_start;
    if (bits_consumed <= bits_left) {
        skip_bits_long(gb_host, bits_consumed);
        return bits_consumed;
    }
    av_log(avctx, AV_LOG_ERROR, "Expected to read %d PS bits actually read %d.\n", bits_left, bits_consumed);
err:
    ps->start = 0;
    skip_bits_long(gb_host, bits_left);
    return bits_left;
}
Example #21
0
int ff_flv_decode_picture_header(MpegEncContext *s)
{
    int format, width, height;

    /* picture header */
    if (get_bits_long(&s->gb, 17) != 1) {
        av_log(s->avctx, AV_LOG_ERROR, "Bad picture start code\n");
        return -1;
    }
    format = get_bits(&s->gb, 5);
    if (format != 0 && format != 1) {
        av_log(s->avctx, AV_LOG_ERROR, "Bad picture format\n");
        return -1;
    }
    s->h263_flv = format+1;
    s->picture_number = get_bits(&s->gb, 8); /* picture timestamp */
    format = get_bits(&s->gb, 3);
    switch (format) {
    case 0:
        width = get_bits(&s->gb, 8);
        height = get_bits(&s->gb, 8);
        break;
    case 1:
        width = get_bits(&s->gb, 16);
        height = get_bits(&s->gb, 16);
        break;
    case 2:
        width = 352;
        height = 288;
        break;
    case 3:
        width = 176;
        height = 144;
        break;
    case 4:
        width = 128;
        height = 96;
        break;
    case 5:
        width = 320;
        height = 240;
        break;
    case 6:
        width = 160;
        height = 120;
        break;
    default:
        width = height = 0;
        break;
    }
    if(av_image_check_size(width, height, 0, s->avctx))
        return -1;
    s->width = width;
    s->height = height;

    s->pict_type = AV_PICTURE_TYPE_I + get_bits(&s->gb, 2);
    s->dropable= s->pict_type > AV_PICTURE_TYPE_P;
    if (s->dropable)
        s->pict_type = AV_PICTURE_TYPE_P;

    skip_bits1(&s->gb); /* deblocking flag */
    s->chroma_qscale= s->qscale = get_bits(&s->gb, 5);

    s->h263_plus = 0;

    s->unrestricted_mv = 1;
    s->h263_long_vectors = 0;

    /* PEI */
    while (get_bits1(&s->gb) != 0) {
        skip_bits(&s->gb, 8);
    }
    s->f_code = 1;

    if(s->avctx->debug & FF_DEBUG_PICT_INFO){
        av_log(s->avctx, AV_LOG_DEBUG, "%c esc_type:%d, qp:%d num:%d\n",
               s->dropable ? 'D' : av_get_picture_type_char(s->pict_type), s->h263_flv-1, s->qscale, s->picture_number);
    }

    s->y_dc_scale_table=
    s->c_dc_scale_table= ff_mpeg1_dc_scale_table;

    return 0;
}
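
The predefined picture sizes in the switch above could equally be kept in a small table; a minimal sketch of that alternative (the table below is an illustration, not part of the decoder — formats 0 and 1 carry explicit 8- or 16-bit dimensions instead):

/* Sketch: fixed FLV picture sizes for format values 2..6, mirroring the switch above. */
static const struct { int w, h; } flv_fixed_sizes[] = {
    { 352, 288 },  /* format 2 */
    { 176, 144 },  /* format 3 */
    { 128,  96 },  /* format 4 */
    { 320, 240 },  /* format 5 */
    { 160, 120 },  /* format 6 */
};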
Example #22
0
static int mjpegb_decode_frame(AVCodecContext *avctx,
                              void *data, int *data_size,
                              AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    MJpegDecodeContext *s = avctx->priv_data;
    const uint8_t *buf_end, *buf_ptr;
    AVFrame *picture = data;
    GetBitContext hgb; /* for the header */
    uint32_t dqt_offs, dht_offs, sof_offs, sos_offs, second_field_offs;
    uint32_t field_size, sod_offs;

    buf_ptr = buf;
    buf_end = buf + buf_size;

read_header:
    /* reset on every SOI */
    s->restart_interval = 0;
    s->restart_count = 0;
    s->mjpb_skiptosod = 0;

    init_get_bits(&hgb, buf_ptr, /*buf_size*/(buf_end - buf_ptr)*8);

    skip_bits(&hgb, 32); /* reserved zeros */

    if (get_bits_long(&hgb, 32) != MKBETAG('m','j','p','g'))
    {
        av_log(avctx, AV_LOG_WARNING, "not mjpeg-b (bad fourcc)\n");
        return 0;
    }

    field_size = get_bits_long(&hgb, 32); /* field size */
    av_log(avctx, AV_LOG_DEBUG, "field size: 0x%x\n", field_size);
    skip_bits(&hgb, 32); /* padded field size */
    second_field_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "second_field_offs is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "second field offs: 0x%x\n", second_field_offs);

    dqt_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "dqt is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "dqt offs: 0x%x\n", dqt_offs);
    if (dqt_offs)
    {
        init_get_bits(&s->gb, buf_ptr+dqt_offs, (buf_end - (buf_ptr+dqt_offs))*8);
        s->start_code = DQT;
        ff_mjpeg_decode_dqt(s);
    }

    dht_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "dht is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "dht offs: 0x%x\n", dht_offs);
    if (dht_offs)
    {
        init_get_bits(&s->gb, buf_ptr+dht_offs, (buf_end - (buf_ptr+dht_offs))*8);
        s->start_code = DHT;
        ff_mjpeg_decode_dht(s);
    }

    sof_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sof is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sof offs: 0x%x\n", sof_offs);
    if (sof_offs)
    {
        init_get_bits(&s->gb, buf_ptr+sof_offs, (buf_end - (buf_ptr+sof_offs))*8);
        s->start_code = SOF0;
        if (ff_mjpeg_decode_sof(s) < 0)
            return -1;
    }

    sos_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sos is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sos offs: 0x%x\n", sos_offs);
    sod_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sod is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sod offs: 0x%x\n", sod_offs);
    if (sos_offs)
    {
//        init_get_bits(&s->gb, buf+sos_offs, (buf_end - (buf+sos_offs))*8);
        init_get_bits(&s->gb, buf_ptr+sos_offs, field_size*8);
        s->mjpb_skiptosod = (sod_offs - sos_offs - show_bits(&s->gb, 16));
        s->start_code = SOS;
        ff_mjpeg_decode_sos(s, NULL, NULL);
    }

    if (s->interlaced) {
        s->bottom_field ^= 1;
        /* if not bottom field, do not output image yet */
        if (s->bottom_field != s->interlace_polarity && second_field_offs)
        {
            buf_ptr = buf + second_field_offs;
            second_field_offs = 0;
            goto read_header;
        }
    }

    //XXX FIXME factorize, this looks very similar to the EOI code

    *picture= *s->picture_ptr;
    *data_size = sizeof(AVFrame);

    if(!s->lossless){
        picture->quality= FFMAX3(s->qscale[0], s->qscale[1], s->qscale[2]);
        picture->qstride= 0;
        picture->qscale_table= s->qscale_table;
        memset(picture->qscale_table, picture->quality, (s->width+15)/16);
        if(avctx->debug & FF_DEBUG_QP)
            av_log(avctx, AV_LOG_DEBUG, "QP: %d\n", picture->quality);
        picture->quality*= FF_QP2LAMBDA;
    }

    return buf_ptr - buf;
}
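
read_offs() is not shown in this snippet; a plausible sketch of such a bounds-checked offset reader, under the assumption that it logs the given message and returns 0 for out-of-range offsets (a guess at the helper, not the verified FFmpeg implementation):

static uint32_t read_offs_sketch(AVCodecContext *avctx, GetBitContext *gb,
                                 uint32_t size, const char *err_msg)
{
    uint32_t offs = get_bits_long(gb, 32);
    if (offs >= size) {
        /* offset points outside the buffer: warn and treat the section as absent */
        av_log(avctx, AV_LOG_WARNING, err_msg, offs, size);
        return 0;
    }
    return offs;
}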
Example #23
0
/**
 * decodes the H261 picture header.
 * @return <0 if no startcode found
 */
static int h261_decode_picture_header(H261Context *h){
    MpegEncContext * const s = &h->s;
    int format, i;
    uint32_t startcode= 0;

    for(i= get_bits_left(&s->gb); i>24; i-=1){
        startcode = ((startcode << 1) | get_bits(&s->gb, 1)) & 0x000FFFFF;

        if(startcode == 0x10)
            break;
    }

    if (startcode != 0x10){
        av_log(s->avctx, AV_LOG_ERROR, "Bad picture start code\n");
        return -1;
    }

    /* temporal reference */
    i= get_bits(&s->gb, 5); /* picture timestamp */
    if(i < (s->picture_number&31))
        i += 32;
    s->picture_number = (s->picture_number&~31) + i;

    s->avctx->time_base= (AVRational){1001, 30000};
    s->current_picture.pts= s->picture_number;


    /* PTYPE starts here */
    skip_bits1(&s->gb); /* split screen off */
    skip_bits1(&s->gb); /* camera  off */
    skip_bits1(&s->gb); /* freeze picture release off */

    format = get_bits1(&s->gb);

    //only 2 formats possible
    if (format == 0){//QCIF
        s->width = 176;
        s->height = 144;
        s->mb_width = 11;
        s->mb_height = 9;
    }else{//CIF
        s->width = 352;
        s->height = 288;
        s->mb_width = 22;
        s->mb_height = 18;
    }

    s->mb_num = s->mb_width * s->mb_height;

    skip_bits1(&s->gb); /* still image mode off */
    skip_bits1(&s->gb); /* Reserved */

    /* PEI */
    while (get_bits1(&s->gb) != 0){
        skip_bits(&s->gb, 8);
    }

    // H.261 has no I-frames, but if we pass FF_I_TYPE for the first frame, the codec
    // crashes when that frame does not actually contain all I-blocks (e.g. when a packet is lost)
    s->pict_type = FF_P_TYPE;

    h->gob_number = 0;
    return 0;
}
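
The startcode scan above shifts one bit at a time into a 20-bit window until it matches the H.261 picture start code (0x00010). A self-contained sketch of the same sliding-window technique over a plain byte buffer (stand-alone helper, not part of the decoder):

#include <stdint.h>

/* Return the bit position just past the 20-bit H.261 PSC, or -1 if absent. */
static int find_h261_psc(const uint8_t *buf, int size_bits)
{
    uint32_t window = 0;
    int pos;

    for (pos = 0; pos < size_bits; pos++) {
        int bit = (buf[pos >> 3] >> (7 - (pos & 7))) & 1; /* MSB-first */
        window  = ((window << 1) | bit) & 0x000FFFFF;     /* keep 20 bits */
        if (pos >= 19 && window == 0x10)
            return pos + 1;
    }
    return -1;
}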
Example #24
0
int parse_dts_header(DTSParserContext *pContext, DTSHeader *pHeader, uint8_t *pBuffer, unsigned uSize)
{
  if(!pContext) return -1;
  if(!pHeader) return -1;

  unsigned ExtDescriptor = 0, ExtCoding = 0;

  uint8_t dts_buffer[32 + FF_INPUT_BUFFER_PADDING_SIZE] = {0};
  int ret = ff_dca_convert_bitstream(pBuffer, uSize, dts_buffer, 32);

  bool is16be = (AV_RB32(pBuffer) == DCA_MARKER_RAW_BE);

  /* Parse Core Header */
  if (ret >= 0) {
    pHeader->HasCore = 1;

    GetBitContext *gb = pContext->gb;
    init_get_bits(gb, dts_buffer, 32 << 3);

    skip_bits_long(gb, 32);                             /* Sync code */
    skip_bits1(gb);                                     /* Frame type */
    pHeader->SamplesPerBlock  = get_bits(gb, 5) + 1;    /* Samples deficit */
    pHeader->CRCPresent       = get_bits1(gb);          /* CRC present */
    pHeader->Blocks           = get_bits(gb, 7) + 1;    /* Number of Blocks */
    pHeader->FrameSize        = get_bits(gb, 14) + 1;   /* Primary (core) Frame Size */
    pHeader->ChannelLayout    = get_bits(gb, 6);        /* Channel configuration */
    unsigned sample_index     = get_bits(gb, 4);        /* Sample frequency index */
    pHeader->SampleRate       = avpriv_dca_sample_rates[sample_index];
    unsigned bitrate_index    = get_bits(gb, 5);        /* Bitrate index */
    pHeader->Bitrate          = dca_bit_rates[bitrate_index];
    skip_bits1(gb);                                     /* Down mix */
    skip_bits1(gb);                                     /* Dynamic range */
    skip_bits1(gb);                                     /* Time stamp */
    skip_bits1(gb);                                     /* Auxiliary data */
    skip_bits1(gb);                                     /* HDCD */
    ExtDescriptor             = get_bits(gb, 3);        /* External descriptor  */
    ExtCoding                 = get_bits1(gb);          /* Extended coding */
    skip_bits1(gb);                                     /* ASPF */
    pHeader->LFE              = get_bits(gb, 2);        /* LFE */
    skip_bits1(gb);                                     /* Predictor History */
    if(pHeader->CRCPresent)
      skip_bits(gb, 16);                                /* CRC */
    skip_bits1(gb);                                     /* Multirate Interpolator */
    skip_bits(gb, 4);                                   /* Encoder Software Revision */
    skip_bits(gb, 2);                                   /* Copy history */
    pHeader->ES = get_bits1(gb);                        /* ES */
    skip_bits(gb, 2);                                   /* PCMR (source PCM resolution) */
    skip_bits1(gb);                                     /* SUMF (Front Sum/Difference Flag) */
    skip_bits1(gb);                                     /* SUMS (Surround Sum/Difference Flag) */
    skip_bits(gb, 4);                                   /* Dialog Normalization Parameter or Unspecified (dependent on encoder version) */

    // Check some basic validity
    if (uSize < pHeader->FrameSize)
      return -1;
  } else {
    pHeader->HasCore = 0;
  }

  if (pHeader->HasCore && !is16be)
    return 0;

  // DTS-HD parsing
  const uint8_t *pHD = nullptr;
  if (pHeader->HasCore) { // If we have a core, only search after the normal buffer
    if (uSize > (pHeader->FrameSize + 4)) { // at least 4 bytes extra, could probably insert a minimal size of a HD header, but so what
      pHD = find_marker32_position(pBuffer + pHeader->FrameSize, uSize - pHeader->FrameSize, DCA_HD_MARKER);
    }
  } else {
    pHD = find_marker32_position(pBuffer, uSize, DCA_HD_MARKER);
  }
  if (pHD) {
    pHeader->IsHD = 1;
    size_t remaining = uSize - (pHD - pBuffer);
    parse_dts_hd_header(pContext, pHeader, pHD, (unsigned)remaining);

    const uint8_t *pXChHD = find_marker32_position(pHD, remaining, DCA_XCH_MARKER);
    if (pXChHD) {
      size_t remaining = uSize - (pXChHD - pBuffer);
      parse_dts_xch_hd_header(pContext, pHeader, pXChHD, (unsigned)remaining);
    }

    const uint8_t *pXXChHD = find_marker32_position(pHD, remaining, DCA_XXCH_MARKER);
    if (pXXChHD) {
      size_t remaining = uSize - (pXXChHD - pBuffer);
      parse_dts_xxch_hd_header(pContext, pHeader, pXXChHD, (unsigned)remaining);
    }
  }

  // Handle DTS extensions
  if (ExtCoding) {
    size_t coreSize = pHD ? (pHD - pBuffer) : uSize;
    if (ExtDescriptor == 0 || ExtDescriptor == 3) {
      const uint8_t *pXCh = find_marker32_position(pBuffer, coreSize, DCA_XCH_MARKER);
      if (pXCh) {
        size_t remaining = coreSize - (pXCh - pBuffer);
        parse_dts_xch_header(pContext, pHeader, pXCh, (unsigned)remaining);
      }
    }
    if (ExtDescriptor == 6) {
      const uint8_t *pXXCh = find_marker32_position(pBuffer, coreSize, DCA_XXCH_MARKER);
      if (pXXCh) {
        size_t remaining = coreSize - (pXXCh - pBuffer);
        parse_dts_xxch_header(pContext, pHeader, pXXCh, (unsigned)remaining);
      }
    }
  }

  return 0;
}
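
find_marker32_position() is used but not defined here; a minimal sketch of what such a big-endian 32-bit marker scan might look like (the name and exact behaviour are assumptions):

#include <stdint.h>
#include <stddef.h>

static const uint8_t *find_marker32_sketch(const uint8_t *buf, size_t len, uint32_t marker)
{
    size_t i;
    for (i = 0; i + 4 <= len; i++) {
        uint32_t v = ((uint32_t)buf[i]     << 24) | ((uint32_t)buf[i + 1] << 16) |
                     ((uint32_t)buf[i + 2] <<  8) |  (uint32_t)buf[i + 3];
        if (v == marker)      /* match on the big-endian 32-bit value */
            return buf + i;
    }
    return NULL;
}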
Example #25
0
File: rdt.c Project: AronVietti/FFmpeg
int
ff_rdt_parse_header(const uint8_t *buf, int len,
                    int *pset_id, int *pseq_no, int *pstream_id,
                    int *pis_keyframe, uint32_t *ptimestamp)
{
    GetBitContext gb;
    int consumed = 0, set_id, seq_no, stream_id, is_keyframe,
        len_included, need_reliable;
    uint32_t timestamp;

    /* skip status packets */
    while (len >= 5 && buf[1] == 0xFF /* status packet */) {
        int pkt_len;

        if (!(buf[0] & 0x80))
            return -1; /* not followed by a data packet */

        pkt_len = AV_RB16(buf+3);
        buf += pkt_len;
        len -= pkt_len;
        consumed += pkt_len;
    }
    if (len < 16)
        return -1;
    /**
     * Layout of the header (in bits):
     * 1:  len_included
     *     Flag indicating whether this header includes a length field;
     *     this can be used to concatenate multiple RDT packets in a
     *     single UDP/TCP data frame and is used to precede RDT data
     *     by stream status packets
     * 1:  need_reliable
     *     Flag indicating whether this header includes a "reliable
     *     sequence number"; these are apparently sequence numbers of
     *     data packets alone. For data packets, this flag is always
     *     set, according to the Real documentation [1]
     * 5:  set_id
     *     ID of a set of streams of identical content, possibly with
     *     different codecs or bitrates
     * 1:  is_reliable
     *     Flag set for certain streams deemed less tolerable for packet
     *     loss
     * 16: seq_no
     *     Packet sequence number; if >=0xFF00, this is a non-data packet
     *     containing stream status info, the second byte indicates the
     *     type of status packet (see wireshark docs / source code [2])
     * if (len_included) {
     *     16: packet_len
     * } else {
     *     packet_len = remainder of UDP/TCP frame
     * }
     * 1:  is_back_to_back
     *     Back-to-Back flag; used for timing, set for one in every 10
     *     packets, according to the Real documentation [1]
     * 1:  is_slow_data
     *     Slow-data flag; currently unused, according to Real docs [1]
     * 5:  stream_id
     *     ID of the stream within this particular set of streams
     * 1:  is_no_keyframe
     *     Non-keyframe flag (unset if packet belongs to a keyframe)
     * 32: timestamp (PTS)
     * if (set_id == 0x1F) {
     *     16: set_id (extended set-of-streams ID; see set_id)
     * }
     * if (need_reliable) {
     *     16: reliable_seq_no
     *         Reliable sequence number (see need_reliable)
     * }
     * if (stream_id == 0x1F) {
     *     16: stream_id (extended stream ID; see stream_id)
     * }
     * [1] https://protocol.helixcommunity.org/files/2005/devdocs/RDT_Feature_Level_20.txt
     * [2] http://www.wireshark.org/docs/dfref/r/rdt.html and
     *     http://anonsvn.wireshark.org/viewvc/trunk/epan/dissectors/packet-rdt.c
     */
    init_get_bits(&gb, buf, len << 3);
    len_included  = get_bits1(&gb);
    need_reliable = get_bits1(&gb);
    set_id        = get_bits(&gb, 5);
    skip_bits(&gb, 1);
    seq_no        = get_bits(&gb, 16);
    if (len_included)
        skip_bits(&gb, 16);
    skip_bits(&gb, 2);
    stream_id     = get_bits(&gb, 5);
    is_keyframe   = !get_bits1(&gb);
    timestamp     = get_bits_long(&gb, 32);
    if (set_id == 0x1f)
        set_id    = get_bits(&gb, 16);
    if (need_reliable)
        skip_bits(&gb, 16);
    if (stream_id == 0x1f)
        stream_id = get_bits(&gb, 16);

    if (pset_id)      *pset_id      = set_id;
    if (pseq_no)      *pseq_no      = seq_no;
    if (pstream_id)   *pstream_id   = stream_id;
    if (pis_keyframe) *pis_keyframe = is_keyframe;
    if (ptimestamp)   *ptimestamp   = timestamp;

    return consumed + (get_bits_count(&gb) >> 3);
}
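
Both set_id and stream_id above use the same escape convention: an all-ones short field means a full 16-bit ID follows. A small sketch of that pattern in isolation (the helper name is illustrative):

/* Read an n-bit ID; the all-ones value escapes to an extended 16-bit ID. */
static int read_escaped_id(GetBitContext *gb, int bits)
{
    int id = get_bits(gb, bits);
    if (id == (1 << bits) - 1)  /* e.g. 0x1F for the 5-bit set_id/stream_id */
        id = get_bits(gb, 16);
    return id;
}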
Example #26
0
File: rv10.c Project: mnzaki/FFmpeg
static int rv20_decode_picture_header(MpegEncContext *s)
{
    int seq, mb_pos, i;
    int rpr_bits;

#if 0
    GetBitContext gb= s->gb;
    for(i=0; i<64; i++) {
        av_log(s->avctx, AV_LOG_DEBUG, "%d", get_bits1(&gb));
        if(i%4==3) av_log(s->avctx, AV_LOG_DEBUG, " ");
    }
    av_log(s->avctx, AV_LOG_DEBUG, "\n");
#endif
#if 0
    av_log(s->avctx, AV_LOG_DEBUG, "%3dx%03d/%02Xx%02X ", s->width, s->height, s->width/4, s->height/4);
    for(i=0; i<s->avctx->extradata_size; i++) {
        av_log(s->avctx, AV_LOG_DEBUG, "%02X ", ((uint8_t*)s->avctx->extradata)[i]);
        if(i%4==3) av_log(s->avctx, AV_LOG_DEBUG, " ");
    }
    av_log(s->avctx, AV_LOG_DEBUG, "\n");
#endif

    i= get_bits(&s->gb, 2);
    switch(i) {
    case 0:
        s->pict_type= AV_PICTURE_TYPE_I;
        break;
    case 1:
        s->pict_type= AV_PICTURE_TYPE_I;
        break; //hmm ...
    case 2:
        s->pict_type= AV_PICTURE_TYPE_P;
        break;
    case 3:
        s->pict_type= AV_PICTURE_TYPE_B;
        break;
    default:
        av_log(s->avctx, AV_LOG_ERROR, "unknown frame type\n");
        return -1;
    }

    if(s->last_picture_ptr==NULL && s->pict_type==AV_PICTURE_TYPE_B) {
        av_log(s->avctx, AV_LOG_ERROR, "early B pix\n");
        return -1;
    }

    if (get_bits1(&s->gb)) {
        av_log(s->avctx, AV_LOG_ERROR, "reserved bit set\n");
        return -1;
    }

    s->qscale = get_bits(&s->gb, 5);
    if(s->qscale==0) {
        av_log(s->avctx, AV_LOG_ERROR, "error, qscale:0\n");
        return -1;
    }

    if(RV_GET_MINOR_VER(s->avctx->sub_id) >= 2)
        s->loop_filter = get_bits1(&s->gb);

    if(RV_GET_MINOR_VER(s->avctx->sub_id) <= 1)
        seq = get_bits(&s->gb, 8) << 7;
    else
        seq = get_bits(&s->gb, 13) << 2;

    rpr_bits = s->avctx->extradata[1] & 7;
    if(rpr_bits) {
        int f, new_w, new_h;
        rpr_bits = FFMIN((rpr_bits >> 1) + 1, 3);

        f = get_bits(&s->gb, rpr_bits);

        if(f) {
            new_w= 4*((uint8_t*)s->avctx->extradata)[6+2*f];
            new_h= 4*((uint8_t*)s->avctx->extradata)[7+2*f];
        } else {
            new_w= s->orig_width ;
            new_h= s->orig_height;
        }
        if(new_w != s->width || new_h != s->height) {
            av_log(s->avctx, AV_LOG_DEBUG, "attempting to change resolution to %dx%d\n", new_w, new_h);
            if (av_image_check_size(new_w, new_h, 0, s->avctx) < 0)
                return -1;
            MPV_common_end(s);
            avcodec_set_dimensions(s->avctx, new_w, new_h);
            s->width  = new_w;
            s->height = new_h;
            if (MPV_common_init(s) < 0)
                return -1;
        }

        if(s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG, "F %d/%d\n", f, rpr_bits);
        }
    }

    mb_pos = ff_h263_decode_mba(s);

//av_log(s->avctx, AV_LOG_DEBUG, "%d\n", seq);
    seq |= s->time &~0x7FFF;
    if(seq - s->time >  0x4000) seq -= 0x8000;
    if(seq - s->time < -0x4000) seq += 0x8000;
    if(seq != s->time) {
        if(s->pict_type!=AV_PICTURE_TYPE_B) {
            s->time= seq;
            s->pp_time= s->time - s->last_non_b_time;
            s->last_non_b_time= s->time;
        } else {
            s->time= seq;
            s->pb_time= s->pp_time - (s->last_non_b_time - s->time);
            if(s->pp_time <=s->pb_time || s->pp_time <= s->pp_time - s->pb_time || s->pp_time<=0) {
                av_log(s->avctx, AV_LOG_DEBUG, "messed up order, possible from seeking? skipping current b frame\n");
                return FRAME_SKIPPED;
            }
            ff_mpeg4_init_direct_mv(s);
        }
    }
//    printf("%d %d %d %d %d\n", seq, (int)s->time, (int)s->last_non_b_time, s->pp_time, s->pb_time);
    /*for(i=0; i<32; i++){
        av_log(s->avctx, AV_LOG_DEBUG, "%d", get_bits1(&s->gb));
    }
    av_log(s->avctx, AV_LOG_DEBUG, "\n");*/
    s->no_rounding= get_bits1(&s->gb);

    if(RV_GET_MINOR_VER(s->avctx->sub_id) <= 1 && s->pict_type == AV_PICTURE_TYPE_B)
        skip_bits(&s->gb, 5); // binary decoder reads 3+2 bits here but they don't seem to be used

    s->f_code = 1;
    s->unrestricted_mv = 1;
    s->h263_aic= s->pict_type == AV_PICTURE_TYPE_I;
//    s->alt_inter_vlc=1;
//    s->obmc=1;
//    s->umvplus=1;
    s->modified_quant=1;
    if(!s->avctx->lowres)
        s->loop_filter=1;

    if(s->avctx->debug & FF_DEBUG_PICT_INFO) {
        av_log(s->avctx, AV_LOG_INFO, "num:%5d x:%2d y:%2d type:%d qscale:%2d rnd:%d\n",
               seq, s->mb_x, s->mb_y, s->pict_type, s->qscale, s->no_rounding);
    }

    assert(s->pict_type != AV_PICTURE_TYPE_B || !s->low_delay);

    return s->mb_width*s->mb_height - mb_pos;
}
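
The seq handling above extends a truncated sequence counter to the full value nearest the running reference time. The same wraparound correction in isolation, as a sketch (period 0x8000, matching the masks used above):

/* Choose the full-range value with the given low bits that lies closest to
 * the reference, assuming a counter period of 0x8000. */
static int extend_seq(int low_bits, int reference)
{
    int seq = (reference & ~0x7FFF) | (low_bits & 0x7FFF);
    if (seq - reference >  0x4000) seq -= 0x8000;
    if (seq - reference < -0x4000) seq += 0x8000;
    return seq;
}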
Example #27
0
static int decode_frame(FLACContext *s)
{
    int i, ret;
    GetBitContext *gb = &s->gb;
    FLACFrameInfo fi;

    if ((ret = ff_flac_decode_frame_header(s->avctx, gb, &fi, 0)) < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "invalid frame header\n");
        return ret;
    }

    if (s->channels && fi.channels != s->channels && s->got_streaminfo) {
        s->channels = s->avctx->channels = fi.channels;
        ff_flac_set_channel_layout(s->avctx);
        ret = allocate_buffers(s);
        if (ret < 0)
            return ret;
    }
    s->channels = s->avctx->channels = fi.channels;
    if (!s->avctx->channel_layout)
        ff_flac_set_channel_layout(s->avctx);
    s->ch_mode = fi.ch_mode;

    if (!s->bps && !fi.bps) {
        av_log(s->avctx, AV_LOG_ERROR, "bps not found in STREAMINFO or frame header\n");
        return AVERROR_INVALIDDATA;
    }
    if (!fi.bps) {
        fi.bps = s->bps;
    } else if (s->bps && fi.bps != s->bps) {
        av_log(s->avctx, AV_LOG_ERROR, "switching bps mid-stream is not "
                                       "supported\n");
        return AVERROR_INVALIDDATA;
    }

    if (!s->bps) {
        s->bps = s->avctx->bits_per_raw_sample = fi.bps;
        flac_set_bps(s);
    }

    if (!s->max_blocksize)
        s->max_blocksize = FLAC_MAX_BLOCKSIZE;
    if (fi.blocksize > s->max_blocksize) {
        av_log(s->avctx, AV_LOG_ERROR, "blocksize %d > %d\n", fi.blocksize,
               s->max_blocksize);
        return AVERROR_INVALIDDATA;
    }
    s->blocksize = fi.blocksize;

    if (!s->samplerate && !fi.samplerate) {
        av_log(s->avctx, AV_LOG_ERROR, "sample rate not found in STREAMINFO"
                                        " or frame header\n");
        return AVERROR_INVALIDDATA;
    }
    if (fi.samplerate == 0)
        fi.samplerate = s->samplerate;
    s->samplerate = s->avctx->sample_rate = fi.samplerate;

    if (!s->got_streaminfo) {
        ret = allocate_buffers(s);
        if (ret < 0)
            return ret;
        ff_flacdsp_init(&s->dsp, s->avctx->sample_fmt, s->bps);
        s->got_streaminfo = 1;
        dump_headers(s->avctx, (FLACStreaminfo *)s);
    }

//    dump_headers(s->avctx, (FLACStreaminfo *)s);

    /* subframes */
    for (i = 0; i < s->channels; i++) {
        if ((ret = decode_subframe(s, i)) < 0)
            return ret;
    }

    align_get_bits(gb);

    /* frame footer */
    skip_bits(gb, 16); /* data crc */

    return 0;
}
Example #28
0
static int flashsv_decode_frame(AVCodecContext *avctx,
                                    void *data, int *data_size,
                                    uint8_t *buf, int buf_size)
{
    FlashSVContext *s = avctx->priv_data;
    int h_blocks, v_blocks, h_part, v_part, i, j;
    GetBitContext gb;

    /* no supplementary picture */
    if (buf_size == 0)
        return 0;

    if(s->frame.data[0])
            avctx->release_buffer(avctx, &s->frame);

    init_get_bits(&gb, buf, buf_size * 8);

    /* start to parse the bitstream */
    s->block_width = 16* (get_bits(&gb, 4)+1);
    s->image_width =     get_bits(&gb,12);
    s->block_height= 16* (get_bits(&gb, 4)+1);
    s->image_height=     get_bits(&gb,12);

    /* calculate amount of blocks and the size of the border blocks */
    h_blocks = s->image_width / s->block_width;
    h_part = s->image_width % s->block_width;
    v_blocks = s->image_height / s->block_height;
    v_part = s->image_height % s->block_height;

    /* the block size could change between frames, make sure the buffer
     * is large enough, if not, get a larger one */
    if(s->block_size < s->block_width*s->block_height) {
        if (s->tmpblock != NULL)
            av_free(s->tmpblock);
        if ((s->tmpblock = av_malloc(3*s->block_width*s->block_height)) == NULL) {
            av_log(avctx, AV_LOG_ERROR, "Can't allocate decompression buffer.\n");
            return -1;
        }
    }
    s->block_size = s->block_width*s->block_height;

    /* init the image size once */
    if((avctx->width==0) && (avctx->height==0)){
        avctx->width = s->image_width;
        avctx->height = s->image_height;
    }

    /* check for changes of image width and image height */
    if ((avctx->width != s->image_width) || (avctx->height != s->image_height)) {
        av_log(avctx, AV_LOG_ERROR, "Frame width or height differs from first frames!\n");
        av_log(avctx, AV_LOG_ERROR, "fh = %d, fv %d  vs  ch = %d, cv = %d\n",avctx->height,
        avctx->width,s->image_height,s->image_width);
        return -1;
    }

    av_log(avctx, AV_LOG_DEBUG, "image: %dx%d block: %dx%d num: %dx%d part: %dx%d\n",
        s->image_width, s->image_height, s->block_width, s->block_height,
        h_blocks, v_blocks, h_part, v_part);

    s->frame.reference = 1;
    s->frame.buffer_hints = FF_BUFFER_HINTS_VALID;
    if (avctx->get_buffer(avctx, &s->frame) < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }

    /* loop over all block rows */
    for (j = 0; j < v_blocks + (v_part?1:0); j++)
    {

        int hp = j*s->block_height; // vertical position in frame
        int hs = (j<v_blocks)?s->block_height:v_part; // size of block


        /* loop over all block columns */
        for (i = 0; i < h_blocks + (h_part?1:0); i++)
        {
            int wp = i*s->block_width; // horizontal position in frame
            int ws = (i<h_blocks)?s->block_width:h_part; // size of block

            /* get the size of the compressed zlib chunk */
            int size = get_bits(&gb, 16);

            if (size == 0) {
                /* no change, don't do anything */
            } else {
                /* decompress block */
                int ret = inflateReset(&(s->zstream));
                if (ret != Z_OK)
                {
                    av_log(avctx, AV_LOG_ERROR, "error in decompression (reset) of block %dx%d\n", i, j);
                    /* return -1; */
                }
                s->zstream.next_in = buf+(get_bits_count(&gb)/8);
                s->zstream.avail_in = size;
                s->zstream.next_out = s->tmpblock;
                s->zstream.avail_out = s->block_size*3;
                ret = inflate(&(s->zstream), Z_FINISH);
                if (ret == Z_DATA_ERROR)
                {
                    av_log(avctx, AV_LOG_ERROR, "Zlib resync occured\n");
                    inflateSync(&(s->zstream));
                    ret = inflate(&(s->zstream), Z_FINISH);
                }

                if ((ret != Z_OK) && (ret != Z_STREAM_END))
                {
                    av_log(avctx, AV_LOG_ERROR, "error in decompression of block %dx%d: %d\n", i, j, ret);
                    /* return -1; */
                }
                copy_region(s->tmpblock, s->frame.data[0], s->image_height-(hp+hs+1), wp, hs, ws, s->frame.linesize[0]);
                skip_bits(&gb, 8*size);   /* skip the consumed bits */
            }
        }
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->frame;

    if ((get_bits_count(&gb)/8) != buf_size)
        av_log(avctx, AV_LOG_ERROR, "buffer not fully consumed (%d != %d)\n",
            buf_size, (get_bits_count(&gb)/8));

    /* report that the buffer was completely consumed */
    return buf_size;
}
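
Each non-empty block above is inflated with the same z_stream, reset before every chunk. A self-contained sketch of that per-block pattern (the helper below is illustrative and assumes the stream was initialised once with inflateInit()):

#include <zlib.h>

static int inflate_block_sketch(z_stream *zs, const uint8_t *src, unsigned src_len,
                                uint8_t *dst, unsigned dst_len)
{
    int ret = inflateReset(zs);          /* reuse the stream for a new block */
    if (ret != Z_OK)
        return ret;
    zs->next_in   = (Bytef *)src;
    zs->avail_in  = src_len;
    zs->next_out  = dst;
    zs->avail_out = dst_len;
    ret = inflate(zs, Z_FINISH);         /* whole block is expected in one call */
    return (ret == Z_OK || ret == Z_STREAM_END) ? 0 : ret;
}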
Example #29
0
void ff_rtp_send_h263_rfc2190(AVFormatContext *s1, const uint8_t *buf, int size,
                              const uint8_t *mb_info, int mb_info_size)
{
    RTPMuxContext *s = s1->priv_data;
    int len, sbits = 0, ebits = 0;
    GetBitContext gb;
    struct H263Info info = { 0 };
    struct H263State state = { 0 };
    int mb_info_pos = 0, mb_info_count = mb_info_size / 12;
    const uint8_t *buf_base = buf;

    s->timestamp = s->cur_timestamp;

    init_get_bits(&gb, buf, size*8);
    if (get_bits(&gb, 22) == 0x20) { /* Picture Start Code */
        info.tr  = get_bits(&gb, 8);
        skip_bits(&gb, 2); /* PTYPE start, H261 disambiguation */
        skip_bits(&gb, 3); /* Split screen, document camera, freeze picture release */
        info.src = get_bits(&gb, 3);
        info.i   = get_bits(&gb, 1);
        info.u   = get_bits(&gb, 1);
        info.s   = get_bits(&gb, 1);
        info.a   = get_bits(&gb, 1);
        info.pb  = get_bits(&gb, 1);
    }

    while (size > 0) {
        struct H263State packet_start_state = state;
        len = FFMIN(s->max_payload_size - 8, size);

        /* Look for a better place to split the frame into packets. */
        if (len < size) {
            const uint8_t *end = ff_h263_find_resync_marker_reverse(buf,
                                                                    buf + len);
            len = end - buf;
            if (len == s->max_payload_size - 8) {
                /* Skip mb info prior to the start of the current ptr */
                while (mb_info_pos < mb_info_count) {
                    uint32_t pos = AV_RL32(&mb_info[12*mb_info_pos])/8;
                    if (pos >= buf - buf_base)
                        break;
                    mb_info_pos++;
                }
                /* Find the first mb info past the end pointer */
                while (mb_info_pos + 1 < mb_info_count) {
                    uint32_t pos = AV_RL32(&mb_info[12*(mb_info_pos + 1)])/8;
                    if (pos >= end - buf_base)
                        break;
                    mb_info_pos++;
                }
                if (mb_info_pos < mb_info_count) {
                    const uint8_t *ptr = &mb_info[12*mb_info_pos];
                    uint32_t bit_pos = AV_RL32(ptr);
                    uint32_t pos = (bit_pos + 7)/8;
                    if (pos <= end - buf_base) {
                        state.quant = ptr[4];
                        state.gobn  = ptr[5];
                        state.mba   = AV_RL16(&ptr[6]);
                        state.hmv1  = (int8_t) ptr[8];
                        state.vmv1  = (int8_t) ptr[9];
                        state.hmv2  = (int8_t) ptr[10];
                        state.vmv2  = (int8_t) ptr[11];
                        ebits = 8 * pos - bit_pos;
                        len   = pos - (buf - buf_base);
                        mb_info_pos++;
                    } else {
                        av_log(s1, AV_LOG_ERROR,
                               "Unable to split H263 packet, use -mb_info %d "
                               "or lower.\n", s->max_payload_size - 8);
                    }
                } else {
                    av_log(s1, AV_LOG_ERROR, "Unable to split H263 packet, "
                           "use -mb_info %d or -ps 1.\n",
                           s->max_payload_size - 8);
                }
            }
        }

        if (size > 2 && !buf[0] && !buf[1])
            send_mode_a(s1, &info, buf, len, ebits, len == size);
        else
            send_mode_b(s1, &info, &packet_start_state, buf, len, sbits,
                        ebits, len == size);

        if (ebits) {
            sbits = 8 - ebits;
            len--;
        } else {
            sbits = 0;
        }
        buf  += len;
        size -= len;
        ebits = 0;
    }
}
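
The mb_info side data above is consumed as raw 12-byte records; based solely on the reads in the function, each record appears to have the following layout (the struct is shown for illustration only — the muxer keeps using raw offsets):

#include <stdint.h>

/* One 12-byte mb_info record, little-endian multi-byte fields (assumption
 * derived from the AV_RL32/AV_RL16 reads above). */
struct h263_mb_info_sketch {
    uint32_t bit_pos;           /* bit offset of the macroblock in the frame */
    uint8_t  quant;             /* quantiser in effect at that macroblock    */
    uint8_t  gobn;              /* GOB number                                */
    uint16_t mba;               /* macroblock address within the GOB         */
    int8_t   hmv1, vmv1;        /* motion vector predictor 1                 */
    int8_t   hmv2, vmv2;        /* motion vector predictor 2                 */
};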
Example #30
0
static void slice_header(struct h265_private *p)
{
	int i;

	p->slice.first_slice_segment_in_pic_flag = get_u(p->regs, 1);

	if (p->nal_unit_type >= 16 && p->nal_unit_type <= 23)
		p->slice.no_output_of_prior_pics_flag = get_u(p->regs, 1);

	p->slice.slice_pic_parameter_set_id = get_ue(p->regs);

	if (!p->slice.first_slice_segment_in_pic_flag)
	{
		if (p->info->dependent_slice_segments_enabled_flag)
			p->slice.dependent_slice_segment_flag = get_u(p->regs, 1);

		p->slice.slice_segment_address = get_u(p->regs, ceil_log2(PicSizeInCtbsY));
	}

	if (!p->slice.dependent_slice_segment_flag)
	{
		p->slice.pic_output_flag = 1;
		p->slice.num_ref_idx_l0_active_minus1 = p->info->num_ref_idx_l0_default_active_minus1;
		p->slice.num_ref_idx_l1_active_minus1 = p->info->num_ref_idx_l1_default_active_minus1;
		p->slice.collocated_from_l0_flag = 1;
		p->slice.slice_deblocking_filter_disabled_flag = p->info->pps_deblocking_filter_disabled_flag;
		p->slice.slice_beta_offset_div2 = p->info->pps_beta_offset_div2;
		p->slice.slice_tc_offset_div2 = p->info->pps_tc_offset_div2;
		p->slice.slice_loop_filter_across_slices_enabled_flag = p->info->pps_loop_filter_across_slices_enabled_flag;

		skip_bits(p->regs, p->info->num_extra_slice_header_bits);

		p->slice.slice_type = get_ue(p->regs);

		if (p->info->output_flag_present_flag)
			p->slice.pic_output_flag = get_u(p->regs, 1);

		if (p->info->separate_colour_plane_flag == 1)
			p->slice.colour_plane_id = get_u(p->regs, 2);

		if (p->nal_unit_type != 19 && p->nal_unit_type != 20)
		{
			p->slice.slice_pic_order_cnt_lsb = get_u(p->regs, p->info->log2_max_pic_order_cnt_lsb_minus4 + 4);

			p->slice.short_term_ref_pic_set_sps_flag = get_u(p->regs, 1);

			skip_bits(p->regs, p->info->NumShortTermPictureSliceHeaderBits);

			if (p->info->long_term_ref_pics_present_flag)
				skip_bits(p->regs, p->info->NumLongTermPictureSliceHeaderBits);

			if (p->info->sps_temporal_mvp_enabled_flag)
				p->slice.slice_temporal_mvp_enabled_flag = get_u(p->regs, 1);
		}

		if (p->info->sample_adaptive_offset_enabled_flag)
		{
			p->slice.slice_sao_luma_flag = get_u(p->regs, 1);
			p->slice.slice_sao_chroma_flag = get_u(p->regs, 1);
		}

		if (p->slice.slice_type == SLICE_P || p->slice.slice_type == SLICE_B)
		{
			p->slice.num_ref_idx_active_override_flag = get_u(p->regs, 1);

			if (p->slice.num_ref_idx_active_override_flag)
			{
				p->slice.num_ref_idx_l0_active_minus1 = get_ue(p->regs);
				if (p->slice.slice_type == SLICE_B)
					p->slice.num_ref_idx_l1_active_minus1 = get_ue(p->regs);
			}

			if (p->info->lists_modification_present_flag && p->info->NumPocTotalCurr > 1)
				ref_pic_lists_modification(p);

			if (p->slice.slice_type == SLICE_B)
				p->slice.mvd_l1_zero_flag = get_u(p->regs, 1);

			if (p->info->cabac_init_present_flag)
				p->slice.cabac_init_flag = get_u(p->regs, 1);

			if (p->slice.slice_temporal_mvp_enabled_flag)
			{
				if (p->slice.slice_type == SLICE_B)
					p->slice.collocated_from_l0_flag = get_u(p->regs, 1);

				if ((p->slice.collocated_from_l0_flag && p->slice.num_ref_idx_l0_active_minus1 > 0) || (!p->slice.collocated_from_l0_flag && p->slice.num_ref_idx_l1_active_minus1 > 0))
					p->slice.collocated_ref_idx = get_ue(p->regs);
			}

			if ((p->info->weighted_pred_flag && p->slice.slice_type == SLICE_P) || (p->info->weighted_bipred_flag && p->slice.slice_type == SLICE_B))
				pred_weight_table(p);

			p->slice.five_minus_max_num_merge_cand = get_ue(p->regs);
		}

		p->slice.slice_qp_delta = get_se(p->regs);

		if (p->info->pps_slice_chroma_qp_offsets_present_flag)
		{
			p->slice.slice_cb_qp_offset = get_se(p->regs);
			p->slice.slice_cr_qp_offset = get_se(p->regs);
		}

		if (p->info->deblocking_filter_override_enabled_flag)
			p->slice.deblocking_filter_override_flag = get_u(p->regs, 1);

		if (p->slice.deblocking_filter_override_flag)
		{
			p->slice.slice_deblocking_filter_disabled_flag = get_u(p->regs, 1);

			if (!p->slice.slice_deblocking_filter_disabled_flag)
			{
				p->slice.slice_beta_offset_div2 = get_se(p->regs);
				p->slice.slice_tc_offset_div2 = get_se(p->regs);
			}
		}

		if (p->info->pps_loop_filter_across_slices_enabled_flag && (p->slice.slice_sao_luma_flag || p->slice.slice_sao_chroma_flag || !p->slice.slice_deblocking_filter_disabled_flag))
			p->slice.slice_loop_filter_across_slices_enabled_flag = get_u(p->regs, 1);
	}

	if (p->info->tiles_enabled_flag || p->info->entropy_coding_sync_enabled_flag)
	{
		p->slice.num_entry_point_offsets = get_ue(p->regs);

		if (p->slice.num_entry_point_offsets > 0)
		{
			p->slice.offset_len_minus1 = get_ue(p->regs);

			for (i = 0; i < p->slice.num_entry_point_offsets; i++)
				p->slice.entry_point_offset_minus1[i] = get_u(p->regs, p->slice.offset_len_minus1 + 1);
		}
	}

	if (p->info->slice_segment_header_extension_present_flag)
		skip_bits(p->regs, get_ue(p->regs) * 8);
}
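
get_ue() and get_se() above are not defined in this snippet; they read the usual Exp-Golomb ue(v)/se(v) codes. A self-contained sketch of that decoding over a simple MSB-first bit reader (the reader is a stand-in, not the register interface used by the driver):

#include <stdint.h>

struct bitreader { const uint8_t *buf; int pos; };

static int br_bit(struct bitreader *b)
{
    int bit = (b->buf[b->pos >> 3] >> (7 - (b->pos & 7))) & 1;
    b->pos++;
    return bit;
}

/* ue(v): count leading zeros, then read the same number of suffix bits. */
static unsigned sketch_get_ue(struct bitreader *b)
{
    int zeros = 0;
    unsigned info = 1;
    int i;

    while (!br_bit(b))
        zeros++;
    for (i = 0; i < zeros; i++)
        info = (info << 1) | br_bit(b);
    return info - 1;
}

/* se(v): map the unsigned code 0,1,2,3,... to 0,+1,-1,+2,... */
static int sketch_get_se(struct bitreader *b)
{
    unsigned k = sketch_get_ue(b);
    return (k & 1) ? (int)((k + 1) >> 1) : -(int)(k >> 1);
}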