Example #1
static int hq_decode_frame(HQContext *ctx, AVFrame *pic,
                           int prof_num, size_t data_size)
{
    const HQProfile *profile;
    GetBitContext gb;
    const uint8_t *perm, *src = ctx->gbc.buffer;
    uint32_t slice_off[21];
    int slice, start_off, next_off, i, ret;

    if (prof_num >= NUM_HQ_PROFILES) {
        profile = &ff_hq_profile[0];
        avpriv_request_sample(ctx->avctx, "HQ Profile %d", prof_num);
    } else {
        profile = &ff_hq_profile[prof_num];
        av_log(ctx->avctx, AV_LOG_VERBOSE, "HQ Profile %d\n", prof_num);
    }

    ctx->avctx->coded_width         = FFALIGN(profile->width,  16);
    ctx->avctx->coded_height        = FFALIGN(profile->height, 16);
    ctx->avctx->width               = profile->width;
    ctx->avctx->height              = profile->height;
    ctx->avctx->bits_per_raw_sample = 8;
    ctx->avctx->pix_fmt             = AV_PIX_FMT_YUV422P;

    ret = ff_get_buffer(ctx->avctx, pic, 0);
    if (ret < 0) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Could not allocate buffer.\n");
        return ret;
    }

    /* Offsets are stored from CUV position, so adjust them accordingly. */
    for (i = 0; i < profile->num_slices + 1; i++)
        slice_off[i] = bytestream2_get_be24(&ctx->gbc) - 4;

    next_off = 0;
    for (slice = 0; slice < profile->num_slices; slice++) {
        start_off = next_off;
        next_off  = profile->tab_h * (slice + 1) / profile->num_slices;
        perm = profile->perm_tab + start_off * profile->tab_w * 2;

        if (slice_off[slice] < (profile->num_slices + 1) * 3 ||
            slice_off[slice] >= slice_off[slice + 1] ||
            slice_off[slice + 1] > data_size) {
            av_log(ctx->avctx, AV_LOG_ERROR,
                   "Invalid slice size %zu.\n", data_size);
            break;
        }
        init_get_bits(&gb, src + slice_off[slice],
                      (slice_off[slice + 1] - slice_off[slice]) * 8);

        for (i = 0; i < (next_off - start_off) * profile->tab_w; i++) {
            ret = hq_decode_mb(ctx, pic, &gb, perm[0] * 16, perm[1] * 16);
            if (ret < 0) {
                av_log(ctx->avctx, AV_LOG_ERROR,
                       "Error decoding macroblock %d at slice %d.\n", i, slice);
                return ret;
            }
            perm += 2;
        }
    }

    return 0;
}
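Every example on this page is built around the same convention: when a decoder or demuxer meets a parameter it cannot handle, it calls avpriv_request_sample() and then either falls back to a safe default (as hq_decode_frame() above does for an unknown profile) or aborts with AVERROR_PATCHWELCOME (as most of the later examples do). The stand-alone sketch below illustrates only that control-flow pattern; pick_profile, check_channels and the DEMO_* constants are invented names for illustration and are not FFmpeg API.

#include <stdio.h>

#define NUM_PROFILES 4
static const int profile_width[NUM_PROFILES] = { 720, 960, 1280, 1920 };

enum { DEMO_OK = 0, DEMO_PATCHWELCOME = -1 };

/* Fallback variant (cf. hq_decode_frame): clamp to a known profile,
 * log a request for a sample, but keep decoding. */
static int pick_profile(int prof_num, int *width)
{
    if (prof_num < 0 || prof_num >= NUM_PROFILES) {
        fprintf(stderr, "Please submit a sample: profile %d\n", prof_num);
        prof_num = 0;
    }
    *width = profile_width[prof_num];
    return DEMO_OK;
}

/* Bail-out variant (cf. mpc8_decode_init, lvf_read_header): refuse the
 * input and signal that a patch plus a sample file would be welcome. */
static int check_channels(int channels)
{
    if (channels > 2) {
        fprintf(stderr, "Please submit a sample: %d channels\n", channels);
        return DEMO_PATCHWELCOME;
    }
    return DEMO_OK;
}

int main(void)
{
    int width;
    pick_profile(7, &width);                    /* falls back to profile 0 */
    printf("width = %d\n", width);              /* prints 720 */
    return check_channels(6) == DEMO_OK ? 0 : 1;
}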
Example #2
static av_cold int mpc8_decode_init(AVCodecContext * avctx)
{
    int i;
    MPCContext *c = avctx->priv_data;
    GetBitContext gb;
    static int vlc_initialized = 0;
    int channels;

    static VLC_TYPE band_table[542][2];
    static VLC_TYPE q1_table[520][2];
    static VLC_TYPE q9up_table[524][2];
    static VLC_TYPE scfi0_table[1 << MPC8_SCFI0_BITS][2];
    static VLC_TYPE scfi1_table[1 << MPC8_SCFI1_BITS][2];
    static VLC_TYPE dscf0_table[560][2];
    static VLC_TYPE dscf1_table[598][2];
    static VLC_TYPE q3_0_table[512][2];
    static VLC_TYPE q3_1_table[516][2];
    static VLC_TYPE codes_table[5708][2];

    if(avctx->extradata_size < 2){
        av_log(avctx, AV_LOG_ERROR, "Too small extradata size (%i)!\n", avctx->extradata_size);
        return -1;
    }
    memset(c->oldDSCF, 0, sizeof(c->oldDSCF));
    av_lfg_init(&c->rnd, 0xDEADBEEF);
    ff_dsputil_init(&c->dsp, avctx);
    ff_mpadsp_init(&c->mpadsp);

    ff_mpc_init();

    init_get_bits(&gb, avctx->extradata, 16);

    skip_bits(&gb, 3);//sample rate
    c->maxbands = get_bits(&gb, 5) + 1;
    if (c->maxbands >= BANDS) {
        av_log(avctx,AV_LOG_ERROR, "maxbands %d too high\n", c->maxbands);
        return AVERROR_INVALIDDATA;
    }
    channels = get_bits(&gb, 4) + 1;
    if (channels > 2) {
        avpriv_request_sample(avctx, "Multichannel MPC SV8");
        return AVERROR_PATCHWELCOME;
    }
    c->MSS = get_bits1(&gb);
    c->frames = 1 << (get_bits(&gb, 3) * 2);

    avctx->sample_fmt = AV_SAMPLE_FMT_S16P;
    avctx->channel_layout = (channels==2) ? AV_CH_LAYOUT_STEREO : AV_CH_LAYOUT_MONO;
    avctx->channels = channels;

    if(vlc_initialized) return 0;
    av_log(avctx, AV_LOG_DEBUG, "Initing VLC\n");

    band_vlc.table = band_table;
    band_vlc.table_allocated = 542;
    init_vlc(&band_vlc, MPC8_BANDS_BITS, MPC8_BANDS_SIZE,
             mpc8_bands_bits,  1, 1,
             mpc8_bands_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);

    q1_vlc.table = q1_table;
    q1_vlc.table_allocated = 520;
    init_vlc(&q1_vlc, MPC8_Q1_BITS, MPC8_Q1_SIZE,
             mpc8_q1_bits,  1, 1,
             mpc8_q1_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);
    q9up_vlc.table = q9up_table;
    q9up_vlc.table_allocated = 524;
    init_vlc(&q9up_vlc, MPC8_Q9UP_BITS, MPC8_Q9UP_SIZE,
             mpc8_q9up_bits,  1, 1,
             mpc8_q9up_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);

    scfi_vlc[0].table = scfi0_table;
    scfi_vlc[0].table_allocated = 1 << MPC8_SCFI0_BITS;
    init_vlc(&scfi_vlc[0], MPC8_SCFI0_BITS, MPC8_SCFI0_SIZE,
             mpc8_scfi0_bits,  1, 1,
             mpc8_scfi0_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);
    scfi_vlc[1].table = scfi1_table;
    scfi_vlc[1].table_allocated = 1 << MPC8_SCFI1_BITS;
    init_vlc(&scfi_vlc[1], MPC8_SCFI1_BITS, MPC8_SCFI1_SIZE,
             mpc8_scfi1_bits,  1, 1,
             mpc8_scfi1_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);

    dscf_vlc[0].table = dscf0_table;
    dscf_vlc[0].table_allocated = 560;
    init_vlc(&dscf_vlc[0], MPC8_DSCF0_BITS, MPC8_DSCF0_SIZE,
             mpc8_dscf0_bits,  1, 1,
             mpc8_dscf0_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);
    dscf_vlc[1].table = dscf1_table;
    dscf_vlc[1].table_allocated = 598;
    init_vlc(&dscf_vlc[1], MPC8_DSCF1_BITS, MPC8_DSCF1_SIZE,
             mpc8_dscf1_bits,  1, 1,
             mpc8_dscf1_codes, 1, 1, INIT_VLC_USE_NEW_STATIC);

    q3_vlc[0].table = q3_0_table;
    q3_vlc[0].table_allocated = 512;
    ff_init_vlc_sparse(&q3_vlc[0], MPC8_Q3_BITS, MPC8_Q3_SIZE,
             mpc8_q3_bits,  1, 1,
             mpc8_q3_codes, 1, 1,
             mpc8_q3_syms,  1, 1, INIT_VLC_USE_NEW_STATIC);
    q3_vlc[1].table = q3_1_table;
    q3_vlc[1].table_allocated = 516;
    ff_init_vlc_sparse(&q3_vlc[1], MPC8_Q4_BITS, MPC8_Q4_SIZE,
             mpc8_q4_bits,  1, 1,
             mpc8_q4_codes, 1, 1,
             mpc8_q4_syms,  1, 1, INIT_VLC_USE_NEW_STATIC);

    for(i = 0; i < 2; i++){
        res_vlc[i].table = &codes_table[vlc_offsets[0+i]];
        res_vlc[i].table_allocated = vlc_offsets[1+i] - vlc_offsets[0+i];
        init_vlc(&res_vlc[i], MPC8_RES_BITS, MPC8_RES_SIZE,
                 &mpc8_res_bits[i],  1, 1,
                 &mpc8_res_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);

        q2_vlc[i].table = &codes_table[vlc_offsets[2+i]];
        q2_vlc[i].table_allocated = vlc_offsets[3+i] - vlc_offsets[2+i];
        init_vlc(&q2_vlc[i], MPC8_Q2_BITS, MPC8_Q2_SIZE,
                 &mpc8_q2_bits[i],  1, 1,
                 &mpc8_q2_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);

        quant_vlc[0][i].table = &codes_table[vlc_offsets[4+i]];
        quant_vlc[0][i].table_allocated = vlc_offsets[5+i] - vlc_offsets[4+i];
        init_vlc(&quant_vlc[0][i], MPC8_Q5_BITS, MPC8_Q5_SIZE,
                 &mpc8_q5_bits[i],  1, 1,
                 &mpc8_q5_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);
        quant_vlc[1][i].table = &codes_table[vlc_offsets[6+i]];
        quant_vlc[1][i].table_allocated = vlc_offsets[7+i] - vlc_offsets[6+i];
        init_vlc(&quant_vlc[1][i], MPC8_Q6_BITS, MPC8_Q6_SIZE,
                 &mpc8_q6_bits[i],  1, 1,
                 &mpc8_q6_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);
        quant_vlc[2][i].table = &codes_table[vlc_offsets[8+i]];
        quant_vlc[2][i].table_allocated = vlc_offsets[9+i] - vlc_offsets[8+i];
        init_vlc(&quant_vlc[2][i], MPC8_Q7_BITS, MPC8_Q7_SIZE,
                 &mpc8_q7_bits[i],  1, 1,
                 &mpc8_q7_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);
        quant_vlc[3][i].table = &codes_table[vlc_offsets[10+i]];
        quant_vlc[3][i].table_allocated = vlc_offsets[11+i] - vlc_offsets[10+i];
        init_vlc(&quant_vlc[3][i], MPC8_Q8_BITS, MPC8_Q8_SIZE,
                 &mpc8_q8_bits[i],  1, 1,
                 &mpc8_q8_codes[i], 1, 1, INIT_VLC_USE_NEW_STATIC);
    }
    vlc_initialized = 1;

    return 0;
}
Example #3
static int decode_frame(AVCodecContext *avctx, void *data,
                        int *got_frame, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    C93DecoderContext * const c93 = avctx->priv_data;
    AVFrame * const newpic = c93->pictures[c93->currentpic];
    AVFrame * const oldpic = c93->pictures[c93->currentpic^1];
    GetByteContext gb;
    uint8_t *out;
    int stride, ret, i, x, y, b, bt = 0;

    if ((ret = ff_set_dimensions(avctx, WIDTH, HEIGHT)) < 0)
        return ret;

    c93->currentpic ^= 1;

    if ((ret = ff_reget_buffer(avctx, newpic)) < 0)
        return ret;

    stride = newpic->linesize[0];

    bytestream2_init(&gb, buf, buf_size);
    b = bytestream2_get_byte(&gb);
    if (b & C93_FIRST_FRAME) {
        newpic->pict_type = AV_PICTURE_TYPE_I;
        newpic->key_frame = 1;
    } else {
        newpic->pict_type = AV_PICTURE_TYPE_P;
        newpic->key_frame = 0;
    }

    for (y = 0; y < HEIGHT; y += 8) {
        out = newpic->data[0] + y * stride;
        for (x = 0; x < WIDTH; x += 8) {
            uint8_t *copy_from = oldpic->data[0];
            unsigned int offset, j;
            uint8_t cols[4], grps[4];
            C93BlockType block_type;

            if (!bt)
                bt = bytestream2_get_byte(&gb);

            block_type= bt & 0x0F;
            switch (block_type) {
            case C93_8X8_FROM_PREV:
                offset = bytestream2_get_le16(&gb);
                if ((ret = copy_block(avctx, out, copy_from, offset, 8, stride)) < 0)
                    return ret;
                break;

            case C93_4X4_FROM_CURR:
                copy_from = newpic->data[0];
            case C93_4X4_FROM_PREV:
                for (j = 0; j < 8; j += 4) {
                    for (i = 0; i < 8; i += 4) {
                        int offset = bytestream2_get_le16(&gb);
                        int from_x = offset % WIDTH;
                        int from_y = offset / WIDTH;
                        if (block_type == C93_4X4_FROM_CURR && from_y == y+j &&
                            (FFABS(from_x - x-i) < 4 || FFABS(from_x - x-i) > WIDTH-4)) {
                            avpriv_request_sample(avctx, "block overlap %d %d %d %d", from_x, x+i, from_y, y+j);
                            return AVERROR_INVALIDDATA;
                        }
                        if ((ret = copy_block(avctx, &out[j*stride+i],
                                              copy_from, offset, 4, stride)) < 0)
                            return ret;
                    }
                }
                break;

            case C93_8X8_2COLOR:
                bytestream2_get_buffer(&gb, cols, 2);
                for (i = 0; i < 8; i++) {
                    draw_n_color(out + i*stride, stride, 8, 1, 1, cols,
                                     NULL, bytestream2_get_byte(&gb));
                }

                break;

            case C93_4X4_2COLOR:
            case C93_4X4_4COLOR:
            case C93_4X4_4COLOR_GRP:
                for (j = 0; j < 8; j += 4) {
                    for (i = 0; i < 8; i += 4) {
                        if (block_type == C93_4X4_2COLOR) {
                            bytestream2_get_buffer(&gb, cols, 2);
                            draw_n_color(out + i + j*stride, stride, 4, 4,
                                    1, cols, NULL, bytestream2_get_le16(&gb));
                        } else if (block_type == C93_4X4_4COLOR) {
                            bytestream2_get_buffer(&gb, cols, 4);
                            draw_n_color(out + i + j*stride, stride, 4, 4,
                                    2, cols, NULL, bytestream2_get_le32(&gb));
                        } else {
                            bytestream2_get_buffer(&gb, grps, 4);
                            draw_n_color(out + i + j*stride, stride, 4, 4,
                                    1, cols, grps, bytestream2_get_le16(&gb));
                        }
                    }
                }
                break;

            case C93_NOOP:
                break;

            case C93_8X8_INTRA:
                for (j = 0; j < 8; j++)
                    bytestream2_get_buffer(&gb, out + j*stride, 8);
                break;

            default:
                av_log(avctx, AV_LOG_ERROR, "unexpected type %x at %dx%d\n",
                       block_type, x, y);
                return AVERROR_INVALIDDATA;
            }
            bt >>= 4;
            out += 8;
        }
    }

    if (b & C93_HAS_PALETTE) {
        uint32_t *palette = (uint32_t *) newpic->data[1];
        for (i = 0; i < 256; i++) {
            palette[i] = 0xFFU << 24 | bytestream2_get_be24(&gb);
        }
        newpic->palette_has_changed = 1;
    } else {
        if (oldpic->data[1])
            memcpy(newpic->data[1], oldpic->data[1], 256 * 4);
    }

    if ((ret = av_frame_ref(data, newpic)) < 0)
        return ret;
    *got_frame = 1;

    return buf_size;
}
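The loop above stores two 4-bit block types per byte: a fresh byte is fetched only when bt has run out, the low nibble is consumed first, and bt >>= 4 then exposes the high nibble for the next macroblock (a zero high nibble simply makes the next iteration fetch a new byte). A minimal self-contained sketch of that unpacking, with made-up input data:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    const uint8_t stream[] = { 0x21, 0x43 };   /* encodes block types 1, 2, 3, 4 */
    size_t pos = 0;
    unsigned bt = 0;
    int block;

    for (block = 0; block < 4; block++) {
        unsigned block_type;

        if (!bt)                      /* out of nibbles: fetch the next byte */
            bt = stream[pos++];
        block_type = bt & 0x0F;       /* low nibble first, as in decode_frame() */
        printf("block %d -> type %u\n", block, block_type);
        bt >>= 4;                     /* high nibble becomes the next type */
    }
    return 0;
}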
Example #4
static int lvf_read_header(AVFormatContext *s)
{
    AVStream *st;
    int64_t next_offset;
    unsigned size, nb_streams, id;

    avio_skip(s->pb, 16);
    nb_streams = avio_rl32(s->pb);
    if (!nb_streams)
        return AVERROR_INVALIDDATA;
    if (nb_streams > 2) {
        avpriv_request_sample(s, "%d streams", nb_streams);
        return AVERROR_PATCHWELCOME;
    }

    avio_skip(s->pb, 1012);

    while (!url_feof(s->pb)) {
        id          = avio_rl32(s->pb);
        size        = avio_rl32(s->pb);
        next_offset = avio_tell(s->pb) + size;

        switch (id) {
        case MKTAG('0', '0', 'f', 'm'):
            st = avformat_new_stream(s, 0);
            if (!st)
                return AVERROR(ENOMEM);

            st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
            avio_skip(s->pb, 4);
            st->codec->width      = avio_rl32(s->pb);
            st->codec->height     = avio_rl32(s->pb);
            avio_skip(s->pb, 4);
            st->codec->codec_tag  = avio_rl32(s->pb);
            st->codec->codec_id   = ff_codec_get_id(ff_codec_bmp_tags,
                                                    st->codec->codec_tag);
            avpriv_set_pts_info(st, 32, 1, 1000);
            break;
        case MKTAG('0', '1', 'f', 'm'):
            st = avformat_new_stream(s, 0);
            if (!st)
                return AVERROR(ENOMEM);

            st->codec->codec_type  = AVMEDIA_TYPE_AUDIO;
            st->codec->codec_tag   = avio_rl16(s->pb);
            st->codec->channels    = avio_rl16(s->pb);
            st->codec->sample_rate = avio_rl16(s->pb);
            avio_skip(s->pb, 8);
            st->codec->bits_per_coded_sample = avio_r8(s->pb);
            st->codec->codec_id    = ff_codec_get_id(ff_codec_wav_tags,
                                                     st->codec->codec_tag);
            avpriv_set_pts_info(st, 32, 1, 1000);
            break;
        case 0:
            avio_seek(s->pb, 2048 + 8, SEEK_SET);
            return 0;
        default:
            avpriv_request_sample(s, "id %d", id);
            return AVERROR_PATCHWELCOME;
        }

        avio_seek(s->pb, next_offset, SEEK_SET);
    }

    return AVERROR_EOF;
}
Example #5
static int read_frame(BVID_DemuxContext *vid, AVIOContext *pb, AVPacket *pkt,
                      uint8_t block_type, AVFormatContext *s)
{
    uint8_t * vidbuf_start = NULL;
    int vidbuf_nbytes = 0;
    int code;
    int bytes_copied = 0;
    int position, duration, npixels;
    unsigned int vidbuf_capacity;
    int ret = 0;
    AVStream *st;

    if (vid->video_index < 0) {
        st = avformat_new_stream(s, NULL);
        if (!st)
            return AVERROR(ENOMEM);
        vid->video_index = st->index;
        if (vid->audio_index < 0) {
            avpriv_request_sample(s, "Using default video time base since "
                                  "having no audio packet before the first "
                                  "video packet");
        }
        avpriv_set_pts_info(st, 64, 185, vid->sample_rate);
        st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
        st->codec->codec_id   = AV_CODEC_ID_BETHSOFTVID;
        st->codec->width      = vid->width;
        st->codec->height     = vid->height;
    }
    st      = s->streams[vid->video_index];
    npixels = st->codec->width * st->codec->height;

    vidbuf_start = av_malloc(vidbuf_capacity = BUFFER_PADDING_SIZE);
    if(!vidbuf_start)
        return AVERROR(ENOMEM);

    // save the file position for the packet, include block type
    position = avio_tell(pb) - 1;

    vidbuf_start[vidbuf_nbytes++] = block_type;

    // get the current packet duration
    duration = vid->bethsoft_global_delay + avio_rl16(pb);

    // set the y offset if it exists (decoder header data should be in data section)
    if(block_type == VIDEO_YOFF_P_FRAME){
        if (avio_read(pb, &vidbuf_start[vidbuf_nbytes], 2) != 2) {
            ret = AVERROR(EIO);
            goto fail;
        }
        vidbuf_nbytes += 2;
    }

    do{
        vidbuf_start = av_fast_realloc(vidbuf_start, &vidbuf_capacity, vidbuf_nbytes + BUFFER_PADDING_SIZE);
        if(!vidbuf_start)
            return AVERROR(ENOMEM);

        code = avio_r8(pb);
        vidbuf_start[vidbuf_nbytes++] = code;

        if(code >= 0x80){ // rle sequence
            if(block_type == VIDEO_I_FRAME)
                vidbuf_start[vidbuf_nbytes++] = avio_r8(pb);
        } else if(code){ // plain sequence
            if (avio_read(pb, &vidbuf_start[vidbuf_nbytes], code) != code) {
                ret = AVERROR(EIO);
                goto fail;
            }
            vidbuf_nbytes += code;
        }
        bytes_copied += code & 0x7F;
        if(bytes_copied == npixels){ // sometimes no stop character is given, need to keep track of bytes copied
            // may contain a 0 byte even if read all pixels
            if(avio_r8(pb))
                avio_seek(pb, -1, SEEK_CUR);
            break;
        }
        if (bytes_copied > npixels) {
            ret = AVERROR_INVALIDDATA;
            goto fail;
        }
    } while(code);

    // copy data into packet
    if ((ret = av_new_packet(pkt, vidbuf_nbytes)) < 0)
        goto fail;
    memcpy(pkt->data, vidbuf_start, vidbuf_nbytes);
    av_free(vidbuf_start);

    pkt->pos = position;
    pkt->stream_index = vid->video_index;
    pkt->duration = duration;
    if (block_type == VIDEO_I_FRAME)
        pkt->flags |= AV_PKT_FLAG_KEY;

    /* if there is a new palette available, add it to packet side data */
    if (vid->palette) {
        uint8_t *pdata = av_packet_new_side_data(pkt, AV_PKT_DATA_PALETTE,
                                                 BVID_PALETTE_SIZE);
        if (pdata)
            memcpy(pdata, vid->palette, BVID_PALETTE_SIZE);
        av_freep(&vid->palette);
    }

    vid->nframes--;  // used to check if all the frames were read
    return 0;
fail:
    av_free(vidbuf_start);
    return ret;
}
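read_frame() above grows vidbuf_start with av_fast_realloc() so that there are always at least BUFFER_PADDING_SIZE bytes of headroom before the next write. The sketch below shows the same amortized-growth idea in plain C; ensure_capacity() and PADDING are invented names, and plain realloc() stands in for the FFmpeg helper, which likewise keeps the current allocation size in a caller-provided variable.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define PADDING 4096   /* stand-in for BUFFER_PADDING_SIZE */

/* Grow buf (if needed) so that it can hold at least min_size bytes,
 * doubling the capacity to keep the number of reallocations logarithmic. */
static uint8_t *ensure_capacity(uint8_t *buf, size_t *cap, size_t min_size)
{
    size_t new_cap;
    uint8_t *p;

    if (*cap >= min_size)
        return buf;
    new_cap = *cap ? *cap : PADDING;
    while (new_cap < min_size)
        new_cap *= 2;
    p = realloc(buf, new_cap);
    if (!p) {
        free(buf);
        return NULL;
    }
    *cap = new_cap;
    return p;
}

int main(void)
{
    uint8_t *buf = NULL;
    size_t cap = 0, nbytes = 0;
    int i;

    for (i = 0; i < 10000; i++) {
        /* same invariant as the demuxer: PADDING bytes of headroom before writing */
        buf = ensure_capacity(buf, &cap, nbytes + PADDING);
        if (!buf)
            return 1;
        buf[nbytes++] = (uint8_t)i;
    }
    printf("wrote %zu bytes, capacity %zu\n", nbytes, cap);
    free(buf);
    return 0;
}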
Example #6
static int smjpeg_read_header(AVFormatContext *s)
{
    SMJPEGContext *sc = s->priv_data;
    AVStream *ast = NULL, *vst = NULL;
    AVIOContext *pb = s->pb;
    uint32_t version, htype, hlength, duration;
    char *comment;

    avio_skip(pb, 8); // magic
    version = avio_rb32(pb);
    if (version)
        avpriv_request_sample(s, "Unknown version %"PRIu32, version);

    duration = avio_rb32(pb); // in msec

    while (!pb->eof_reached) {
        htype = avio_rl32(pb);
        switch (htype) {
        case SMJPEG_TXT:
            hlength = avio_rb32(pb);
            if (!hlength || hlength > 512)
                return AVERROR_INVALIDDATA;
            comment = av_malloc(hlength + 1);
            if (!comment)
                return AVERROR(ENOMEM);
            if (avio_read(pb, comment, hlength) != hlength) {
                av_freep(&comment);
                av_log(s, AV_LOG_ERROR, "error when reading comment\n");
                return AVERROR_INVALIDDATA;
            }
            comment[hlength] = 0;
            av_dict_set(&s->metadata, "comment", comment,
                        AV_DICT_DONT_STRDUP_VAL);
            break;
        case SMJPEG_SND:
            if (ast) {
                avpriv_request_sample(s, "Multiple audio streams");
                return AVERROR_PATCHWELCOME;
            }
            hlength = avio_rb32(pb);
            if (hlength < 8)
                return AVERROR_INVALIDDATA;
            ast = avformat_new_stream(s, 0);
            if (!ast)
                return AVERROR(ENOMEM);
            ast->codec->codec_type  = AVMEDIA_TYPE_AUDIO;
            ast->codec->sample_rate = avio_rb16(pb);
            ast->codec->bits_per_coded_sample = avio_r8(pb);
            ast->codec->channels    = avio_r8(pb);
            ast->codec->codec_tag   = avio_rl32(pb);
            ast->codec->codec_id    = ff_codec_get_id(ff_codec_smjpeg_audio_tags,
                                                      ast->codec->codec_tag);
            ast->duration           = duration;
            sc->audio_stream_index  = ast->index;
            avpriv_set_pts_info(ast, 32, 1, 1000);
            avio_skip(pb, hlength - 8);
            break;
        case SMJPEG_VID:
            if (vst) {
                avpriv_request_sample(s, "Multiple video streams");
                return AVERROR_INVALIDDATA;
            }
            hlength = avio_rb32(pb);
            if (hlength < 12)
                return AVERROR_INVALIDDATA;
            avio_skip(pb, 4); // number of frames
            vst = avformat_new_stream(s, 0);
            if (!vst)
                return AVERROR(ENOMEM);
            vst->codec->codec_type = AVMEDIA_TYPE_VIDEO;
            vst->codec->width      = avio_rb16(pb);
            vst->codec->height     = avio_rb16(pb);
            vst->codec->codec_tag  = avio_rl32(pb);
            vst->codec->codec_id   = ff_codec_get_id(ff_codec_smjpeg_video_tags,
                                                     vst->codec->codec_tag);
            vst->duration          = duration;
            sc->video_stream_index = vst->index;
            avpriv_set_pts_info(vst, 32, 1, 1000);
            avio_skip(pb, hlength - 12);
            break;
        case SMJPEG_HEND:
            return 0;
        default:
            av_log(s, AV_LOG_ERROR, "unknown header %"PRIx32"\n", htype);
            return AVERROR_INVALIDDATA;
        }
    }

    return AVERROR_EOF;
}
Example #7
static int read_header(AVFormatContext *s)
{
    BRSTMDemuxContext *b = s->priv_data;
    int bom, major, minor, codec, chunk;
    int64_t h1offset, pos, toffset;
    uint32_t size, asize, start = 0;
    AVStream *st;
    int ret = AVERROR_EOF;
    int loop = 0;
    int bfstm = !strcmp("bfstm", s->iformat->name);

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);
    st->codec->codec_type = AVMEDIA_TYPE_AUDIO;

    avio_skip(s->pb, 4);

    bom = avio_rb16(s->pb);
    if (bom != 0xFEFF && bom != 0xFFFE) {
        av_log(s, AV_LOG_ERROR, "invalid byte order: %X\n", bom);
        return AVERROR_INVALIDDATA;
    }

    if (bom == 0xFFFE)
        b->little_endian = 1;

    if (!bfstm) {
        major = avio_r8(s->pb);
        minor = avio_r8(s->pb);
        avio_skip(s->pb, 4); // size of file
        size = read16(s);
        if (size < 14)
            return AVERROR_INVALIDDATA;

        avio_skip(s->pb, size - 14);
        pos = avio_tell(s->pb);
        if (avio_rl32(s->pb) != MKTAG('H','E','A','D'))
            return AVERROR_INVALIDDATA;
    } else {
        uint32_t info_offset = 0;
        uint16_t section_count, header_size, i;

        header_size = read16(s); // 6

        avio_skip(s->pb, 4); // Unknown constant 0x00030000
        avio_skip(s->pb, 4); // size of file
        section_count = read16(s);
        avio_skip(s->pb, 2); // padding
        for (i = 0; avio_tell(s->pb) < header_size
                    && !(start && info_offset)
                    && i < section_count; i++) {
            uint16_t flag = read16(s);
            avio_skip(s->pb, 2);
            switch (flag) {
            case 0x4000:
                info_offset = read32(s);
                /*info_size =*/ read32(s);
                break;
            case 0x4001:
                avio_skip(s->pb, 4); // seek offset
                avio_skip(s->pb, 4); // seek size
                break;
            case 0x4002:
                start = read32(s) + 8;
                avio_skip(s->pb, 4); //data_size = read32(s);
                break;
            case 0x4003:
                avio_skip(s->pb, 4); // REGN offset
                avio_skip(s->pb, 4); // REGN size
                break;
            }
        }

        if (!info_offset || !start)
            return AVERROR_INVALIDDATA;

        avio_skip(s->pb, info_offset - avio_tell(s->pb));
        pos = avio_tell(s->pb);
        if (avio_rl32(s->pb) != MKTAG('I','N','F','O'))
            return AVERROR_INVALIDDATA;
    }

    size = read32(s);
    if (size < 192)
        return AVERROR_INVALIDDATA;
    avio_skip(s->pb, 4); // unknown
    h1offset = read32(s);
    if (h1offset > size)
        return AVERROR_INVALIDDATA;
    avio_skip(s->pb, 12);
    toffset = read32(s) + 16LL;
    if (toffset > size)
        return AVERROR_INVALIDDATA;

    avio_skip(s->pb, pos + h1offset + 8 - avio_tell(s->pb));
    codec = avio_r8(s->pb);

    switch (codec) {
    case 0: codec = AV_CODEC_ID_PCM_S8_PLANAR;    break;
    case 1: codec = b->little_endian ?
                    AV_CODEC_ID_PCM_S16LE_PLANAR :
                    AV_CODEC_ID_PCM_S16BE_PLANAR; break;
    case 2: codec = b->little_endian ?
                    AV_CODEC_ID_ADPCM_THP_LE :
                    AV_CODEC_ID_ADPCM_THP;        break;
    default:
        avpriv_request_sample(s, "codec %d", codec);
        return AVERROR_PATCHWELCOME;
    }

    loop = avio_r8(s->pb); // loop flag
    st->codec->codec_id = codec;
    st->codec->channels = avio_r8(s->pb);
    if (!st->codec->channels)
        return AVERROR_INVALIDDATA;

    avio_skip(s->pb, 1); // padding

    st->codec->sample_rate = bfstm ? read32(s) : read16(s);
    if (!st->codec->sample_rate)
        return AVERROR_INVALIDDATA;

    if (!bfstm)
        avio_skip(s->pb, 2); // padding

    if (loop) {
        if (av_dict_set_int(&s->metadata, "loop_start",
                            av_rescale(read32(s), AV_TIME_BASE,
                                       st->codec->sample_rate),
                            0) < 0)
            return AVERROR(ENOMEM);
    } else {
        avio_skip(s->pb, 4);
    }

    st->start_time = 0;
    st->duration = read32(s);
    avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);

    if (!bfstm)
        start = read32(s);
    b->current_block = 0;
    b->block_count = read32(s);
    if (b->block_count > UINT16_MAX) {
        av_log(s, AV_LOG_WARNING, "too many blocks: %u\n", b->block_count);
        return AVERROR_INVALIDDATA;
    }

    b->block_size = read32(s);
    if (b->block_size > UINT32_MAX / st->codec->channels)
        return AVERROR_INVALIDDATA;

    b->samples_per_block = read32(s);
    b->last_block_used_bytes = read32(s);
    b->last_block_samples = read32(s);
    b->last_block_size = read32(s);
    if (b->last_block_size > UINT32_MAX / st->codec->channels)
        return AVERROR_INVALIDDATA;
    if (b->last_block_used_bytes > b->last_block_size)
        return AVERROR_INVALIDDATA;


    if (codec == AV_CODEC_ID_ADPCM_THP || codec == AV_CODEC_ID_ADPCM_THP_LE) {
        int ch;

        avio_skip(s->pb, pos + toffset - avio_tell(s->pb));
        if (!bfstm)
            toffset = read32(s) + 16LL;
        else
            toffset = toffset + read32(s) + st->codec->channels * 8 - 8;
        if (toffset > size)
            return AVERROR_INVALIDDATA;

        avio_skip(s->pb, pos + toffset - avio_tell(s->pb));
        b->table = av_mallocz(32 * st->codec->channels);
        if (!b->table)
            return AVERROR(ENOMEM);

        for (ch = 0; ch < st->codec->channels; ch++) {
            if (avio_read(s->pb, b->table + ch * 32, 32) != 32) {
                ret = AVERROR_INVALIDDATA;
                goto fail;
            }
            avio_skip(s->pb, bfstm ? 14 : 24);
        }
    }

    if (size < (avio_tell(s->pb) - pos)) {
        ret = AVERROR_INVALIDDATA;
        goto fail;
    }

    avio_skip(s->pb, size - (avio_tell(s->pb) - pos));

    while (!avio_feof(s->pb)) {
        chunk = avio_rl32(s->pb);
        size  = read32(s);
        if (size < 8) {
            ret = AVERROR_INVALIDDATA;
            goto fail;
        }
        size -= 8;
        switch (chunk) {
        case MKTAG('S','E','E','K'):
        case MKTAG('A','D','P','C'):
            if (codec != AV_CODEC_ID_ADPCM_THP &&
                codec != AV_CODEC_ID_ADPCM_THP_LE)
                goto skip;

            asize = b->block_count * st->codec->channels * 4;
            if (size < asize) {
                ret = AVERROR_INVALIDDATA;
                goto fail;
            }
            if (b->adpc) {
                av_log(s, AV_LOG_WARNING, "skipping additional ADPC chunk\n");
                goto skip;
            } else {
                b->adpc = av_mallocz(asize);
                if (!b->adpc) {
                    ret = AVERROR(ENOMEM);
                    goto fail;
                }
                if (bfstm && codec != AV_CODEC_ID_ADPCM_THP_LE) {
                    // Big-endian BFSTMs have little-endian SEEK tables
                    // for some strange reason.
                    int i;
                    for (i = 0; i < asize; i += 2) {
                        b->adpc[i+1] = avio_r8(s->pb);
                        b->adpc[i]   = avio_r8(s->pb);
                    }
                } else {
                    avio_read(s->pb, b->adpc, asize);
                }
                avio_skip(s->pb, size - asize);
            }
            break;
        case MKTAG('D','A','T','A'):
            if ((start < avio_tell(s->pb)) ||
                (!b->adpc && (codec == AV_CODEC_ID_ADPCM_THP ||
                              codec == AV_CODEC_ID_ADPCM_THP_LE))) {
                ret = AVERROR_INVALIDDATA;
                goto fail;
            }
            avio_skip(s->pb, start - avio_tell(s->pb));

            if (bfstm && (codec == AV_CODEC_ID_ADPCM_THP ||
                          codec == AV_CODEC_ID_ADPCM_THP_LE))
                avio_skip(s->pb, 24);

            b->data_start = avio_tell(s->pb);

            if (!bfstm && (major != 1 || minor))
                avpriv_request_sample(s, "Version %d.%d", major, minor);

            return 0;
        default:
            av_log(s, AV_LOG_WARNING, "skipping unknown chunk: %X\n", chunk);
skip:
            avio_skip(s->pb, size);
        }
    }

fail:
    read_close(s);

    return ret;
}
Example #8
static int dss_read_header(AVFormatContext *s)
{
    DSSDemuxContext *ctx = s->priv_data;
    AVIOContext *pb = s->pb;
    AVStream *st;
    int ret;

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);

    ret = dss_read_metadata_string(s, DSS_HEAD_OFFSET_AUTHOR,
                                   DSS_AUTHOR_SIZE, "author");
    if (ret)
        return ret;

    ret = dss_read_metadata_date(s, DSS_HEAD_OFFSET_END_TIME, "date");
    if (ret)
        return ret;

    ret = dss_read_metadata_string(s, DSS_HEAD_OFFSET_COMMENT,
                                   DSS_COMMENT_SIZE, "comment");
    if (ret)
        return ret;

    avio_seek(pb, DSS_HEAD_OFFSET_ACODEC, SEEK_SET);
    ctx->audio_codec = avio_r8(pb);

    if (ctx->audio_codec == DSS_ACODEC_DSS_SP) {
        st->codec->codec_id    = AV_CODEC_ID_DSS_SP;
        st->codec->sample_rate = 12000;
    } else if (ctx->audio_codec == DSS_ACODEC_G723_1) {
        st->codec->codec_id    = AV_CODEC_ID_G723_1;
        st->codec->sample_rate = 8000;
    } else {
        avpriv_request_sample(s, "Support for codec %x in DSS",
                              ctx->audio_codec);
        return AVERROR_PATCHWELCOME;
    }

    st->codec->codec_type     = AVMEDIA_TYPE_AUDIO;
    st->codec->channel_layout = AV_CH_LAYOUT_MONO;
    st->codec->channels       = 1;

    avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);
    st->start_time = 0;

    /* Jump over header */

    if (avio_seek(pb, DSS_HEADER_SIZE, SEEK_SET) != DSS_HEADER_SIZE)
        return AVERROR(EIO);

    ctx->counter = 0;
    ctx->swap    = 0;

    ctx->dss_sp_buf = av_malloc(DSS_FRAME_SIZE + 1);
    if (!ctx->dss_sp_buf)
        return AVERROR(ENOMEM);

    return 0;
}
Example #9
static int rsd_read_header(AVFormatContext *s)
{
    AVIOContext *pb = s->pb;
    int i, version, start = 0x800;
    AVCodecContext *codec;
    AVStream *st = avformat_new_stream(s, NULL);

    if (!st)
        return AVERROR(ENOMEM);

    avio_skip(pb, 3); // "RSD"
    version = avio_r8(pb) - '0';

    codec = st->codec;
    codec->codec_type = AVMEDIA_TYPE_AUDIO;
    codec->codec_tag  = avio_rl32(pb);
    codec->codec_id   = ff_codec_get_id(rsd_tags, codec->codec_tag);
    if (!codec->codec_id) {
        char tag_buf[5];

        av_get_codec_tag_string(tag_buf, sizeof(tag_buf), codec->codec_tag);
        for (i=0; i < FF_ARRAY_ELEMS(rsd_unsupported_tags); i++) {
            if (codec->codec_tag == rsd_unsupported_tags[i]) {
                avpriv_request_sample(s, "Codec tag: %s", tag_buf);
                return AVERROR_PATCHWELCOME;
            }
        }
        av_log(s, AV_LOG_ERROR, "Unknown codec tag: %s\n", tag_buf);
        return AVERROR_INVALIDDATA;
    }

    codec->channels = avio_rl32(pb);
    if (!codec->channels)
        return AVERROR_INVALIDDATA;

    avio_skip(pb, 4); // Bit depth
    codec->sample_rate = avio_rl32(pb);
    if (!codec->sample_rate)
        return AVERROR_INVALIDDATA;

    avio_skip(pb, 4); // Unknown

    switch (codec->codec_id) {
    case AV_CODEC_ID_ADPCM_IMA_RAD:
        codec->block_align = 20 * codec->channels;
        if (pb->seekable)
            st->duration = av_get_audio_frame_duration(codec, avio_size(pb) - start);
        break;
    case AV_CODEC_ID_ADPCM_THP:
        /* RSD3GADP is mono, so only alloc enough memory
           to store the coeff table for a single channel. */

        if (ff_alloc_extradata(codec, 32))
            return AVERROR(ENOMEM);

        start = avio_rl32(pb);

        if (avio_read(s->pb, codec->extradata, 32) != 32)
            return AVERROR_INVALIDDATA;

        for (i = 0; i < 16; i++)
            AV_WB16(codec->extradata + i * 2, AV_RL16(codec->extradata + i * 2));

        if (pb->seekable)
            st->duration = (avio_size(pb) - start) / 8 * 14;
        break;
    case AV_CODEC_ID_PCM_S16LE:
    case AV_CODEC_ID_PCM_S16BE:
        if (version != 4)
            start = avio_rl32(pb);

        if (pb->seekable)
            st->duration = (avio_size(pb) - start) / 2 / codec->channels;
        break;
    }

    avio_skip(pb, start - avio_tell(pb));

    avpriv_set_pts_info(st, 64, 1, codec->sample_rate);

    return 0;
}
Example #10
static int xwd_decode_frame(AVCodecContext *avctx, void *data,
                            int *got_frame, AVPacket *avpkt)
{
    AVFrame *p = data;
    const uint8_t *buf = avpkt->data;
    int i, ret, buf_size = avpkt->size;
    uint32_t version, header_size, vclass, ncolors;
    uint32_t xoffset, be, bpp, lsize, rsize;
    uint32_t pixformat, pixdepth, bunit, bitorder, bpad;
    uint32_t rgb[3];
    uint8_t *ptr;
    GetByteContext gb;

    if (buf_size < XWD_HEADER_SIZE)
        return AVERROR_INVALIDDATA;

    bytestream2_init(&gb, buf, buf_size);
    header_size = bytestream2_get_be32u(&gb);

    version = bytestream2_get_be32u(&gb);
    if (version != XWD_VERSION) {
        av_log(avctx, AV_LOG_ERROR, "unsupported version\n");
        return AVERROR_INVALIDDATA;
    }

    if (buf_size < header_size || header_size < XWD_HEADER_SIZE) {
        av_log(avctx, AV_LOG_ERROR, "invalid header size\n");
        return AVERROR_INVALIDDATA;
    }

    pixformat     = bytestream2_get_be32u(&gb);
    pixdepth      = bytestream2_get_be32u(&gb);
    avctx->width  = bytestream2_get_be32u(&gb);
    avctx->height = bytestream2_get_be32u(&gb);
    xoffset       = bytestream2_get_be32u(&gb);
    be            = bytestream2_get_be32u(&gb);
    bunit         = bytestream2_get_be32u(&gb);
    bitorder      = bytestream2_get_be32u(&gb);
    bpad          = bytestream2_get_be32u(&gb);
    bpp           = bytestream2_get_be32u(&gb);
    lsize         = bytestream2_get_be32u(&gb);
    vclass        = bytestream2_get_be32u(&gb);
    rgb[0]        = bytestream2_get_be32u(&gb);
    rgb[1]        = bytestream2_get_be32u(&gb);
    rgb[2]        = bytestream2_get_be32u(&gb);
    bytestream2_skipu(&gb, 8);
    ncolors       = bytestream2_get_be32u(&gb);
    bytestream2_skipu(&gb, header_size - (XWD_HEADER_SIZE - 20));

    av_log(avctx, AV_LOG_DEBUG, "pixformat %d, pixdepth %d, bunit %d, bitorder %d, bpad %d\n",
           pixformat, pixdepth, bunit, bitorder, bpad);
    av_log(avctx, AV_LOG_DEBUG, "vclass %d, ncolors %d, bpp %d, be %d, lsize %d, xoffset %d\n",
           vclass, ncolors, bpp, be, lsize, xoffset);
    av_log(avctx, AV_LOG_DEBUG, "red %0x, green %0x, blue %0x\n", rgb[0], rgb[1], rgb[2]);

    if (pixformat > XWD_Z_PIXMAP) {
        av_log(avctx, AV_LOG_ERROR, "invalid pixmap format\n");
        return AVERROR_INVALIDDATA;
    }

    if (pixdepth == 0 || pixdepth > 32) {
        av_log(avctx, AV_LOG_ERROR, "invalid pixmap depth\n");
        return AVERROR_INVALIDDATA;
    }

    if (xoffset) {
        avpriv_request_sample(avctx, "xoffset %d", xoffset);
        return AVERROR_PATCHWELCOME;
    }

    if (be > 1) {
        av_log(avctx, AV_LOG_ERROR, "invalid byte order\n");
        return AVERROR_INVALIDDATA;
    }

    if (bitorder > 1) {
        av_log(avctx, AV_LOG_ERROR, "invalid bitmap bit order\n");
        return AVERROR_INVALIDDATA;
    }

    if (bunit != 8 && bunit != 16 && bunit != 32) {
        av_log(avctx, AV_LOG_ERROR, "invalid bitmap unit\n");
        return AVERROR_INVALIDDATA;
    }

    if (bpad != 8 && bpad != 16 && bpad != 32) {
        av_log(avctx, AV_LOG_ERROR, "invalid bitmap scan-line pad\n");
        return AVERROR_INVALIDDATA;
    }

    if (bpp == 0 || bpp > 32) {
        av_log(avctx, AV_LOG_ERROR, "invalid bits per pixel\n");
        return AVERROR_INVALIDDATA;
    }

    if (ncolors > 256) {
        av_log(avctx, AV_LOG_ERROR, "invalid number of entries in colormap\n");
        return AVERROR_INVALIDDATA;
    }

    if ((ret = av_image_check_size(avctx->width, avctx->height, 0, NULL)) < 0)
        return ret;

    rsize = FFALIGN(avctx->width * bpp, bpad) / 8;
    if (lsize < rsize) {
        av_log(avctx, AV_LOG_ERROR, "invalid bytes per scan-line\n");
        return AVERROR_INVALIDDATA;
    }

    if (bytestream2_get_bytes_left(&gb) < ncolors * XWD_CMAP_SIZE + avctx->height * lsize) {
        av_log(avctx, AV_LOG_ERROR, "input buffer too small\n");
        return AVERROR_INVALIDDATA;
    }

    if (pixformat != XWD_Z_PIXMAP) {
        av_log(avctx, AV_LOG_ERROR, "pixmap format %d unsupported\n", pixformat);
        return AVERROR_PATCHWELCOME;
    }

    avctx->pix_fmt = AV_PIX_FMT_NONE;
    switch (vclass) {
    case XWD_STATIC_GRAY:
    case XWD_GRAY_SCALE:
        if (bpp != 1)
            return AVERROR_INVALIDDATA;
        if (pixdepth == 1)
            avctx->pix_fmt = AV_PIX_FMT_MONOWHITE;
        break;
    case XWD_STATIC_COLOR:
    case XWD_PSEUDO_COLOR:
        if (bpp == 8)
            avctx->pix_fmt = AV_PIX_FMT_PAL8;
        break;
    case XWD_TRUE_COLOR:
    case XWD_DIRECT_COLOR:
        if (bpp != 16 && bpp != 24 && bpp != 32)
            return AVERROR_INVALIDDATA;
        if (bpp == 16 && pixdepth == 15) {
            if (rgb[0] == 0x7C00 && rgb[1] == 0x3E0 && rgb[2] == 0x1F)
                avctx->pix_fmt = be ? AV_PIX_FMT_RGB555BE : AV_PIX_FMT_RGB555LE;
            else if (rgb[0] == 0x1F && rgb[1] == 0x3E0 && rgb[2] == 0x7C00)
                avctx->pix_fmt = be ? AV_PIX_FMT_BGR555BE : AV_PIX_FMT_BGR555LE;
        } else if (bpp == 16 && pixdepth == 16) {
            if (rgb[0] == 0xF800 && rgb[1] == 0x7E0 && rgb[2] == 0x1F)
                avctx->pix_fmt = be ? AV_PIX_FMT_RGB565BE : AV_PIX_FMT_RGB565LE;
            else if (rgb[0] == 0x1F && rgb[1] == 0x7E0 && rgb[2] == 0xF800)
                avctx->pix_fmt = be ? AV_PIX_FMT_BGR565BE : AV_PIX_FMT_BGR565LE;
        } else if (bpp == 24) {
            if (rgb[0] == 0xFF0000 && rgb[1] == 0xFF00 && rgb[2] == 0xFF)
                avctx->pix_fmt = be ? AV_PIX_FMT_RGB24 : AV_PIX_FMT_BGR24;
            else if (rgb[0] == 0xFF && rgb[1] == 0xFF00 && rgb[2] == 0xFF0000)
                avctx->pix_fmt = be ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_RGB24;
        } else if (bpp == 32) {
            if (rgb[0] == 0xFF0000 && rgb[1] == 0xFF00 && rgb[2] == 0xFF)
                avctx->pix_fmt = be ? AV_PIX_FMT_ARGB : AV_PIX_FMT_BGRA;
            else if (rgb[0] == 0xFF && rgb[1] == 0xFF00 && rgb[2] == 0xFF0000)
                avctx->pix_fmt = be ? AV_PIX_FMT_ABGR : AV_PIX_FMT_RGBA;
        }
        bytestream2_skipu(&gb, ncolors * XWD_CMAP_SIZE);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "invalid visual class\n");
        return AVERROR_INVALIDDATA;
    }

    if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
        avpriv_request_sample(avctx,
                              "Unknown file: bpp %d, pixdepth %d, vclass %d",
                              bpp, pixdepth, vclass);
        return AVERROR_PATCHWELCOME;
    }

    if ((ret = ff_get_buffer(avctx, p, 0)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return ret;
    }

    p->key_frame = 1;
    p->pict_type = AV_PICTURE_TYPE_I;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        uint32_t *dst = (uint32_t *)p->data[1];
        uint8_t red, green, blue;

        for (i = 0; i < ncolors; i++) {

            bytestream2_skipu(&gb, 4); // skip colormap entry number
            red    = bytestream2_get_byteu(&gb);
            bytestream2_skipu(&gb, 1);
            green  = bytestream2_get_byteu(&gb);
            bytestream2_skipu(&gb, 1);
            blue   = bytestream2_get_byteu(&gb);
            bytestream2_skipu(&gb, 3); // skip bitmask flag and padding

            dst[i] = red << 16 | green << 8 | blue;
        }
    }

    ptr = p->data[0];
    for (i = 0; i < avctx->height; i++) {
        bytestream2_get_bufferu(&gb, ptr, rsize);
        bytestream2_skipu(&gb, lsize - rsize);
        ptr += p->linesize[0];
    }

    *got_frame       = 1;

    return buf_size;
}
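In xwd_decode_frame() the pixel format for true-colour visuals is derived from the red/green/blue channel masks together with the byte-order flag. The sketch below reproduces that mask test for the 16-bit case only; it is self-contained and returns plain strings rather than FFmpeg pixel-format enums.

#include <stdint.h>
#include <stdio.h>

static const char *fmt_for_16bpp(uint32_t r, uint32_t g, uint32_t b, int be)
{
    if (r == 0xF800 && g == 0x07E0 && b == 0x001F)   /* red in the high bits */
        return be ? "RGB565BE" : "RGB565LE";
    if (r == 0x001F && g == 0x07E0 && b == 0xF800)   /* blue in the high bits */
        return be ? "BGR565BE" : "BGR565LE";
    return "unknown";
}

int main(void)
{
    printf("%s\n", fmt_for_16bpp(0xF800, 0x07E0, 0x001F, 0)); /* RGB565LE */
    printf("%s\n", fmt_for_16bpp(0x001F, 0x07E0, 0xF800, 1)); /* BGR565BE */
    return 0;
}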
Example #11
/**
 * Decode LSE block with initialization parameters
 */
int ff_jpegls_decode_lse(MJpegDecodeContext *s)
{
    int id;
    int tid, wt, maxtab, i, j;

    int len = get_bits(&s->gb, 16);
    id = get_bits(&s->gb, 8);

    switch (id) {
    case 1:
        if (len < 13)
            return AVERROR_INVALIDDATA;

        s->maxval = get_bits(&s->gb, 16);
        s->t1     = get_bits(&s->gb, 16);
        s->t2     = get_bits(&s->gb, 16);
        s->t3     = get_bits(&s->gb, 16);
        s->reset  = get_bits(&s->gb, 16);

        if(s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG, "Coding parameters maxval:%d T1:%d T2:%d T3:%d reset:%d\n",
                   s->maxval, s->t1, s->t2, s->t3, s->reset);
        }

//        ff_jpegls_reset_coding_parameters(s, 0);
        //FIXME quant table?
        break;
    case 2:
        s->palette_index = 0;
    case 3:
        tid= get_bits(&s->gb, 8);
        wt = get_bits(&s->gb, 8);

        if (len < 5)
            return AVERROR_INVALIDDATA;

        if (wt < 1 || wt > MAX_COMPONENTS) {
            avpriv_request_sample(s->avctx, "wt %d", wt);
            return AVERROR_PATCHWELCOME;
        }

        if (!s->maxval)
            maxtab = 255;
        else if ((5 + wt*(s->maxval+1)) < 65535)
            maxtab = s->maxval;
        else
            maxtab = 65530/wt - 1;

        if(s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG, "LSE palette %d tid:%d wt:%d maxtab:%d\n", id, tid, wt, maxtab);
        }
        if (maxtab >= 256) {
            avpriv_request_sample(s->avctx, ">8bit palette");
            return AVERROR_PATCHWELCOME;
        }
        maxtab = FFMIN(maxtab, (len - 5) / wt + s->palette_index);

        if (s->palette_index > maxtab)
            return AVERROR_INVALIDDATA;

        if ((s->avctx->pix_fmt == AV_PIX_FMT_GRAY8 || s->avctx->pix_fmt == AV_PIX_FMT_PAL8) &&
            (s->picture_ptr->format == AV_PIX_FMT_GRAY8 || s->picture_ptr->format == AV_PIX_FMT_PAL8)) {
            uint32_t *pal = (uint32_t *)s->picture_ptr->data[1];
            int shift = 0;

            if (s->avctx->bits_per_raw_sample > 0 && s->avctx->bits_per_raw_sample < 8) {
                maxtab = FFMIN(maxtab, (1<<s->avctx->bits_per_raw_sample)-1);
                shift = 8 - s->avctx->bits_per_raw_sample;
            }

            s->picture_ptr->format =
            s->avctx->pix_fmt = AV_PIX_FMT_PAL8;
            for (i=s->palette_index; i<=maxtab; i++) {
                uint8_t k = i << shift;
                pal[k] = 0;
                for (j=0; j<wt; j++) {
                    pal[k] |= get_bits(&s->gb, 8) << (8*(wt-j-1));
                }
            }
            s->palette_index = i;
        }
        break;
    case 4:
        avpriv_request_sample(s->avctx, "oversize image");
        return AVERROR(ENOSYS);
    default:
        av_log(s->avctx, AV_LOG_ERROR, "invalid id %d\n", id);
        return AVERROR_INVALIDDATA;
    }
    ff_dlog(s->avctx, "ID=%i, T=%i,%i,%i\n", id, s->t1, s->t2, s->t3);

    return 0;
}
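The palette branch above packs wt bytes per table entry, shifting the j-th byte read by 8*(wt-j-1) so that the first component lands in the most significant position; with wt == 3 this produces the 0xRRGGBB layout expected for PAL8. A minimal stand-alone illustration (a plain byte array replaces the GetBitContext):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    const uint8_t entry_bytes[3] = { 0x12, 0x34, 0x56 };   /* e.g. R, G, B */
    const int wt = 3;                                      /* bytes per entry */
    uint32_t pal = 0;
    int j;

    for (j = 0; j < wt; j++)
        pal |= (uint32_t)entry_bytes[j] << (8 * (wt - j - 1));

    printf("0x%06X\n", (unsigned)pal);   /* prints 0x123456 */
    return 0;
}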
Example #12
/**
 * Execute ANSI escape code
 * @return 0 on success, negative on error
 */
static int execute_code(AVCodecContext * avctx, int c)
{
    AnsiContext *s = avctx->priv_data;
    int ret, i, width, height;
    switch(c) {
    case 'A': //Cursor Up
        s->y = FFMAX(s->y - (s->nb_args > 0 ? s->args[0]*s->font_height : s->font_height), 0);
        break;
    case 'B': //Cursor Down
        s->y = FFMIN(s->y + (s->nb_args > 0 ? s->args[0]*s->font_height : s->font_height), avctx->height - s->font_height);
        break;
    case 'C': //Cursor Right
        s->x = FFMIN(s->x + (s->nb_args > 0 ? s->args[0]*FONT_WIDTH : FONT_WIDTH), avctx->width  - FONT_WIDTH);
        break;
    case 'D': //Cursor Left
        s->x = FFMAX(s->x - (s->nb_args > 0 ? s->args[0]*FONT_WIDTH : FONT_WIDTH), 0);
        break;
    case 'H': //Cursor Position
    case 'f': //Horizontal and Vertical Position
        s->y = s->nb_args > 0 ? av_clip((s->args[0] - 1)*s->font_height, 0, avctx->height - s->font_height) : 0;
        s->x = s->nb_args > 1 ? av_clip((s->args[1] - 1)*FONT_WIDTH,     0, avctx->width  - FONT_WIDTH) : 0;
        break;
    case 'h': //set screen mode
    case 'l': //reset screen mode
        if (s->nb_args < 2)
            s->args[0] = DEFAULT_SCREEN_MODE;
        width = avctx->width;
        height = avctx->height;
        switch(s->args[0]) {
        case 0: case 1: case 4: case 5: case 13: case 19: //320x200 (25 rows)
            s->font = avpriv_cga_font;
            s->font_height = 8;
            width  = 40<<3;
            height = 25<<3;
            break;
        case 2: case 3: //640x400 (25 rows)
            s->font = avpriv_vga16_font;
            s->font_height = 16;
            width  = 80<<3;
            height = 25<<4;
            break;
        case 6: case 14: //640x200 (25 rows)
            s->font = avpriv_cga_font;
            s->font_height = 8;
            width  = 80<<3;
            height = 25<<3;
            break;
        case 7: //set line wrapping
            break;
        case 15: case 16: //640x350 (43 rows)
            s->font = avpriv_cga_font;
            s->font_height = 8;
            width  = 80<<3;
            height = 43<<3;
            break;
        case 17: case 18: //640x480 (60 rows)
            s->font = avpriv_cga_font;
            s->font_height = 8;
            width  = 80<<3;
            height = 60<<3;
            break;
        default:
            avpriv_request_sample(avctx, "Unsupported screen mode");
        }
        s->x = av_clip(s->x, 0, width  - FONT_WIDTH);
        s->y = av_clip(s->y, 0, height - s->font_height);
        if (width != avctx->width || height != avctx->height) {
            av_frame_unref(s->frame);
            avcodec_set_dimensions(avctx, width, height);
            if ((ret = ff_get_buffer(avctx, s->frame,
                                     AV_GET_BUFFER_FLAG_REF)) < 0)
                return ret;
            s->frame->pict_type           = AV_PICTURE_TYPE_I;
            s->frame->palette_has_changed = 1;
            set_palette((uint32_t *)s->frame->data[1]);
            erase_screen(avctx);
        } else if (c == 'l') {
            erase_screen(avctx);
        }
        break;
    case 'J': //Erase in Page
        switch (s->args[0]) {
        case 0:
            erase_line(avctx, s->x, avctx->width - s->x);
            if (s->y < avctx->height - s->font_height)
                memset(s->frame->data[0] + (s->y + s->font_height)*s->frame->linesize[0],
                    DEFAULT_BG_COLOR, (avctx->height - s->y - s->font_height)*s->frame->linesize[0]);
            break;
        case 1:
            erase_line(avctx, 0, s->x);
            if (s->y > 0)
                memset(s->frame->data[0], DEFAULT_BG_COLOR, s->y * s->frame->linesize[0]);
            break;
        case 2:
            erase_screen(avctx);
        }
        break;
    case 'K': //Erase in Line
        switch(s->args[0]) {
        case 0:
            erase_line(avctx, s->x, avctx->width - s->x);
            break;
        case 1:
            erase_line(avctx, 0, s->x);
            break;
        case 2:
            erase_line(avctx, 0, avctx->width);
        }
        break;
    case 'm': //Select Graphics Rendition
        if (s->nb_args == 0) {
            s->nb_args = 1;
            s->args[0] = 0;
        }
        for (i = 0; i < FFMIN(s->nb_args, MAX_NB_ARGS); i++) {
            int m = s->args[i];
            if (m == 0) {
                s->attributes = 0;
                s->fg = DEFAULT_FG_COLOR;
                s->bg = DEFAULT_BG_COLOR;
            } else if (m == 1 || m == 2 || m == 4 || m == 5 || m == 7 || m == 8) {
                s->attributes |= 1 << (m - 1);
            } else if (m >= 30 && m <= 37) {
                s->fg = ansi_to_cga[m - 30];
            } else if (m == 38 && i + 2 < FFMIN(s->nb_args, MAX_NB_ARGS) && s->args[i + 1] == 5 && s->args[i + 2] < 256) {
                int index = s->args[i + 2];
                s->fg = index < 16 ? ansi_to_cga[index] : index;
                i += 2;
            } else if (m == 39) {
                s->fg = ansi_to_cga[DEFAULT_FG_COLOR];
            } else if (m >= 40 && m <= 47) {
                s->bg = ansi_to_cga[m - 40];
            } else if (m == 48 && i + 2 < FFMIN(s->nb_args, MAX_NB_ARGS) && s->args[i + 1] == 5 && s->args[i + 2] < 256) {
                int index = s->args[i + 2];
                s->bg = index < 16 ? ansi_to_cga[index] : index;
                i += 2;
            } else if (m == 49) {
                s->bg = ansi_to_cga[DEFAULT_BG_COLOR];
            } else {
                avpriv_request_sample(avctx, "Unsupported rendition parameter");
            }
        }
        break;
    case 'n': //Device Status Report
    case 'R': //report current line and column
        /* ignore */
        break;
    case 's': //Save Cursor Position
        s->sx = s->x;
        s->sy = s->y;
        break;
    case 'u': //Restore Cursor Position
        s->x = av_clip(s->sx, 0, avctx->width  - FONT_WIDTH);
        s->y = av_clip(s->sy, 0, avctx->height - s->font_height);
        break;
    default:
        avpriv_request_sample(avctx, "Unknown escape code");
        break;
    }
    s->x = av_clip(s->x, 0, avctx->width  - FONT_WIDTH);
    s->y = av_clip(s->y, 0, avctx->height - s->font_height);
    return 0;
}
Example #13
/**
 * Decode LSE block with initialization parameters
 */
int ff_jpegls_decode_lse(MJpegDecodeContext *s)
{
    int id;
    int tid, wt, maxtab, i, j;

    int len = get_bits(&s->gb, 16);  /* length: FIXME: verify field validity */
    id = get_bits(&s->gb, 8);

    switch (id) {
    case 1:
        s->maxval = get_bits(&s->gb, 16);
        s->t1     = get_bits(&s->gb, 16);
        s->t2     = get_bits(&s->gb, 16);
        s->t3     = get_bits(&s->gb, 16);
        s->reset  = get_bits(&s->gb, 16);

//        ff_jpegls_reset_coding_parameters(s, 0);
        //FIXME quant table?
        break;
    case 2:
        s->palette_index = 0;
    case 3:
        tid= get_bits(&s->gb, 8);
        wt = get_bits(&s->gb, 8);

        if (len < 5)
            return AVERROR_INVALIDDATA;

        if (wt < 1 || wt > MAX_COMPONENTS) {
            avpriv_request_sample(s->avctx, "wt %d", wt);
            return AVERROR_PATCHWELCOME;
        }

        if ((5 + wt*(s->maxval+1)) < 65535)
            maxtab = s->maxval;
        else
            maxtab = 65530/wt - 1;

        if(s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG, "LSE palette %d tid:%d wt:%d maxtab:%d\n", id, tid, wt, maxtab);
        }
        if (maxtab >= 256) {
            avpriv_request_sample(s->avctx, ">8bit palette");
            return AVERROR_PATCHWELCOME;
        }
        maxtab = FFMIN(maxtab, (len - 5) / wt + s->palette_index);

        if (s->palette_index > maxtab)
            return AVERROR_INVALIDDATA;

        if ((s->avctx->pix_fmt == AV_PIX_FMT_GRAY8 || s->avctx->pix_fmt == AV_PIX_FMT_PAL8) &&
                (s->picture_ptr->format == AV_PIX_FMT_GRAY8 || s->picture_ptr->format == AV_PIX_FMT_PAL8)) {
            uint32_t *pal = (uint32_t *)s->picture_ptr->data[1];
            s->picture_ptr->format =
                s->avctx->pix_fmt = AV_PIX_FMT_PAL8;
            for (i=s->palette_index; i<maxtab; i++) {
                pal[i] = 0;
                for (j=0; j<wt; j++) {
                    pal[i] |= get_bits(&s->gb, 8) << (8*(wt-j-1));
                }
            }
            s->palette_index = i;
        }
        break;
    case 4:
        av_log(s->avctx, AV_LOG_ERROR, "oversize image not supported\n");
        return AVERROR(ENOSYS);
    default:
        av_log(s->avctx, AV_LOG_ERROR, "invalid id %d\n", id);
        return AVERROR_INVALIDDATA;
    }
    av_dlog(s->avctx, "ID=%i, T=%i,%i,%i\n", id, s->t1, s->t2, s->t3);

    return 0;
}
Example #14
static int cdxl_decode_frame(AVCodecContext *avctx, void *data,
                             int *got_frame, AVPacket *pkt)
{
    CDXLVideoContext *c = avctx->priv_data;
    AVFrame * const p = data;
    int ret, w, h, encoding, aligned_width, buf_size = pkt->size;
    const uint8_t *buf = pkt->data;

    if (buf_size < 32)
        return AVERROR_INVALIDDATA;
    encoding        = buf[1] & 7;
    c->format       = buf[1] & 0xE0;
    w               = AV_RB16(&buf[14]);
    h               = AV_RB16(&buf[16]);
    c->bpp          = buf[19];
    c->palette_size = AV_RB16(&buf[20]);
    c->palette      = buf + 32;
    c->video        = c->palette + c->palette_size;
    c->video_size   = buf_size - c->palette_size - 32;

    if (c->palette_size > 512)
        return AVERROR_INVALIDDATA;
    if (buf_size < c->palette_size + 32)
        return AVERROR_INVALIDDATA;
    if (c->bpp < 1)
        return AVERROR_INVALIDDATA;
    if (c->format != BIT_PLANAR && c->format != BIT_LINE) {
        avpriv_request_sample(avctx, "Pixel format 0x%0x", c->format);
        return AVERROR_PATCHWELCOME;
    }

    if ((ret = ff_set_dimensions(avctx, w, h)) < 0)
        return ret;

    aligned_width = FFALIGN(c->avctx->width, 16);
    c->padded_bits  = aligned_width - c->avctx->width;
    if (c->video_size < aligned_width * avctx->height * c->bpp / 8)
        return AVERROR_INVALIDDATA;
    if (!encoding && c->palette_size && c->bpp <= 8) {
        avctx->pix_fmt = AV_PIX_FMT_PAL8;
    } else if (encoding == 1 && (c->bpp == 6 || c->bpp == 8)) {
        if (c->palette_size != (1 << (c->bpp - 1)))
            return AVERROR_INVALIDDATA;
        avctx->pix_fmt = AV_PIX_FMT_BGR24;
    } else {
        avpriv_request_sample(avctx, "Encoding %d and bpp %d",
                              encoding, c->bpp);
        return AVERROR_PATCHWELCOME;
    }

    if ((ret = ff_get_buffer(avctx, p, 0)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return ret;
    }
    p->pict_type = AV_PICTURE_TYPE_I;

    if (encoding) {
        av_fast_padded_malloc(&c->new_video, &c->new_video_size,
                              h * w + FF_INPUT_BUFFER_PADDING_SIZE);
        if (!c->new_video)
            return AVERROR(ENOMEM);
        if (c->bpp == 8)
            cdxl_decode_ham8(c, p);
        else
            cdxl_decode_ham6(c, p);
    } else {
        cdxl_decode_rgb(c, p);
    }
    *got_frame = 1;

    return buf_size;
}
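The size check above requires the bitplane payload to cover the whole frame at the 16-pixel padded width. A small sketch of that arithmetic, assuming only the width, height and bpp fields read from the CDXL header (the helper name is made up):

#include <stdint.h>

/* Minimum number of video bytes a CDXL frame needs, as checked in
 * cdxl_decode_frame() above: the width is padded to a multiple of 16
 * and every pixel contributes bpp bits across the bitplanes. */
static int64_t cdxl_min_video_size(int width, int height, int bpp)
{
    int aligned_width = (width + 15) & ~15;   /* same as FFALIGN(width, 16) */
    return (int64_t)aligned_width * height * bpp / 8;
}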
Example #15
static int genh_read_header(AVFormatContext *s)
{
    unsigned start_offset, header_size, codec, coef_type, coef[2];
    GENHDemuxContext *c = s->priv_data;
    av_unused unsigned coef_splitted[2];
    int align, ch, ret;
    AVStream *st;

    avio_skip(s->pb, 4);

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);

    st->codec->codec_type  = AVMEDIA_TYPE_AUDIO;
    st->codec->channels    = avio_rl32(s->pb);
    if (st->codec->channels <= 0)
        return AVERROR_INVALIDDATA;
    if (st->codec->channels == 1)
        st->codec->channel_layout = AV_CH_LAYOUT_MONO;
    else if (st->codec->channels == 2)
        st->codec->channel_layout = AV_CH_LAYOUT_STEREO;
    align                  =
    c->interleave_size     = avio_rl32(s->pb);
    if (align < 0 || align > INT_MAX / st->codec->channels)
        return AVERROR_INVALIDDATA;
    st->codec->block_align = align * st->codec->channels;
    st->codec->sample_rate = avio_rl32(s->pb);
    avio_skip(s->pb, 4);
    st->duration = avio_rl32(s->pb);

    codec = avio_rl32(s->pb);
    switch (codec) {
    case  0: st->codec->codec_id = AV_CODEC_ID_ADPCM_PSX;        break;
    case  1:
    case 11: st->codec->bits_per_coded_sample = 4;
             st->codec->block_align = 36 * st->codec->channels;
             st->codec->codec_id = AV_CODEC_ID_ADPCM_IMA_WAV;    break;
    case  2: st->codec->codec_id = AV_CODEC_ID_ADPCM_DTK;        break;
    case  3: st->codec->codec_id = st->codec->block_align > 0 ?
                                   AV_CODEC_ID_PCM_S16BE_PLANAR :
                                   AV_CODEC_ID_PCM_S16BE;        break;
    case  4: st->codec->codec_id = st->codec->block_align > 0 ?
                                   AV_CODEC_ID_PCM_S16LE_PLANAR :
                                   AV_CODEC_ID_PCM_S16LE;        break;
    case  5: st->codec->codec_id = st->codec->block_align > 0 ?
                                   AV_CODEC_ID_PCM_S8_PLANAR :
                                   AV_CODEC_ID_PCM_S8;           break;
    case  6: st->codec->codec_id = AV_CODEC_ID_SDX2_DPCM;        break;
    case  7: ret = ff_alloc_extradata(st->codec, 2);
             if (ret < 0)
                 return ret;
             AV_WL16(st->codec->extradata, 3);
             st->codec->codec_id = AV_CODEC_ID_ADPCM_IMA_WS;     break;
    case 10: st->codec->codec_id = AV_CODEC_ID_ADPCM_AICA;       break;
    case 12: st->codec->codec_id = AV_CODEC_ID_ADPCM_THP;        break;
    case 13: st->codec->codec_id = AV_CODEC_ID_PCM_U8;           break;
    case 17: st->codec->codec_id = AV_CODEC_ID_ADPCM_IMA_QT;     break;
    default:
             avpriv_request_sample(s, "codec %d", codec);
             return AVERROR_PATCHWELCOME;
    }

    start_offset = avio_rl32(s->pb);
    header_size  = avio_rl32(s->pb);

    if (header_size > start_offset)
        return AVERROR_INVALIDDATA;

    if (header_size == 0)
        start_offset = 0x800;

    coef[0]          = avio_rl32(s->pb);
    coef[1]          = avio_rl32(s->pb);
    c->dsp_int_type  = avio_rl32(s->pb);
    coef_type        = avio_rl32(s->pb);
    coef_splitted[0] = avio_rl32(s->pb);
    coef_splitted[1] = avio_rl32(s->pb);

    if (st->codec->codec_id == AV_CODEC_ID_ADPCM_THP) {
        if (st->codec->channels > 2) {
            avpriv_request_sample(s, "channels %d>2", st->codec->channels);
            return AVERROR_PATCHWELCOME;
        }

        ret = ff_alloc_extradata(st->codec, 32 * st->codec->channels);
        if (ret < 0)
            return ret;
        for (ch = 0; ch < st->codec->channels; ch++) {
            if (coef_type & 1) {
                avpriv_request_sample(s, "coef_type & 1");
                return AVERROR_PATCHWELCOME;
            } else {
                avio_seek(s->pb, coef[ch], SEEK_SET);
                avio_read(s->pb, st->codec->extradata + 32 * ch, 32);
            }
        }

        if (c->dsp_int_type == 1) {
            st->codec->block_align = 8 * st->codec->channels;
            if (c->interleave_size != 1 &&
                c->interleave_size != 2 &&
                c->interleave_size != 4)
                return AVERROR_INVALIDDATA;
        }
    }

    avio_skip(s->pb, start_offset - avio_tell(s->pb));

    avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);

    return 0;
}
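The block_align handling above deserves a second look: the IMA WAV cases (GENH codec ids 1 and 11) override whatever interleave size the header declared with a fixed 36 bytes per channel, while the other codecs keep interleave_size * channels. A hedged sketch of that selection (the helper and its name are illustrative, not part of the demuxer):

/* block_align selection as genh_read_header() above performs it:
 * codec ids 1 and 11 map to ADPCM_IMA_WAV with fixed 36-byte
 * per-channel blocks; everything else uses the header's interleave. */
static int genh_block_align(unsigned codec, int interleave_size, int channels)
{
    if (codec == 1 || codec == 11)
        return 36 * channels;
    return interleave_size * channels;
}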
Example #16
static int xwma_read_header(AVFormatContext *s)
{
    int64_t size;
    int ret = 0;
    uint32_t dpds_table_size = 0;
    uint32_t *dpds_table = NULL;
    unsigned int tag;
    AVIOContext *pb = s->pb;
    AVStream *st;
    XWMAContext *xwma = s->priv_data;
    int i;

    /* The following code is mostly copied from wav.c, with some
     * minor alterations.
     */

    /* check RIFF header */
    tag = avio_rl32(pb);
    if (tag != MKTAG('R', 'I', 'F', 'F'))
        return -1;
    avio_rl32(pb); /* file size */
    tag = avio_rl32(pb);
    if (tag != MKTAG('X', 'W', 'M', 'A'))
        return -1;

    /* parse fmt header */
    tag = avio_rl32(pb);
    if (tag != MKTAG('f', 'm', 't', ' '))
        return -1;
    size = avio_rl32(pb);
    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);

    ret = ff_get_wav_header(pb, st->codec, size, 0);
    if (ret < 0)
        return ret;
    st->need_parsing = AVSTREAM_PARSE_NONE;

    /* All xWMA files I have seen contained WMAv2 data. If there are files
     * using WMA Pro or some other codec, then we need to figure out the right
     * extradata for that. Thus, ask the user for feedback, but try to go on
     * anyway.
     */
    if (st->codec->codec_id != AV_CODEC_ID_WMAV2) {
        avpriv_request_sample(s, "Unexpected codec (tag 0x04%x; id %d)",
                              st->codec->codec_tag, st->codec->codec_id);
    } else {
        /* In all xWMA files I have seen, there is no extradata. But the WMA
         * codecs require extradata, so we provide our own fake extradata.
         *
         * First, check that there really was no extradata in the header. If
         * there was, then try to use it, after asking the user to provide a
         * sample of this unusual file.
         */
        if (st->codec->extradata_size != 0) {
            /* Surprise, surprise: We *did* get some extradata. No idea
             * if it will work, but just go on and try it, after asking
             * the user for a sample.
             */
            avpriv_request_sample(s, "Unexpected extradata (%d bytes)",
                                  st->codec->extradata_size);
        } else {
            st->codec->extradata_size = 6;
            st->codec->extradata      = av_mallocz(6 + FF_INPUT_BUFFER_PADDING_SIZE);
            if (!st->codec->extradata)
                return AVERROR(ENOMEM);

            /* setup extradata with our experimentally obtained value */
            st->codec->extradata[4] = 31;
        }
    }

    if (!st->codec->channels) {
        av_log(s, AV_LOG_WARNING, "Invalid channel count: %d\n",
               st->codec->channels);
        return AVERROR_INVALIDDATA;
    }
    if (!st->codec->bits_per_coded_sample) {
        av_log(s, AV_LOG_WARNING, "Invalid bits_per_coded_sample: %d\n",
               st->codec->bits_per_coded_sample);
        return AVERROR_INVALIDDATA;
    }

    /* set the sample rate */
    avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);

    /* parse the remaining RIFF chunks */
    for (;;) {
        if (pb->eof_reached) {
            ret = AVERROR_EOF;
            goto fail;
        }
        /* read next chunk tag */
        tag = avio_rl32(pb);
        size = avio_rl32(pb);
        if (tag == MKTAG('d', 'a', 't', 'a')) {
            /* We assume that the data chunk comes last. */
            break;
        } else if (tag == MKTAG('d','p','d','s')) {
            /* Quoting the MSDN xWMA docs on the dpds chunk: "Contains the
             * decoded packet cumulative data size array, each element is the
             * number of bytes accumulated after the corresponding xWMA packet
             * is decoded in order."
             *
             * Each packet has size equal to st->codec->block_align, which in
             * all cases I saw so far was always 2230. Thus, we can use the
             * dpds data to compute a seeking index.
             */

            /* Error out if there is more than one dpds chunk. */
            if (dpds_table) {
                av_log(s, AV_LOG_ERROR, "two dpds chunks present\n");
                ret = AVERROR_INVALIDDATA;
                goto fail;
            }

            /* Compute the number of entries in the dpds chunk. */
            if (size & 3) {  /* Size should be divisible by four */
                av_log(s, AV_LOG_WARNING,
                       "dpds chunk size %"PRId64" not divisible by 4\n", size);
            }
            dpds_table_size = size / 4;
            if (dpds_table_size == 0 || dpds_table_size >= INT_MAX / 4) {
                av_log(s, AV_LOG_ERROR,
                       "dpds chunk size %"PRId64" invalid\n", size);
                return AVERROR_INVALIDDATA;
            }

            /* Allocate some temporary storage to keep the dpds data around
             * for processing later on.
             */
            dpds_table = av_malloc(dpds_table_size * sizeof(uint32_t));
            if (!dpds_table) {
                return AVERROR(ENOMEM);
            }

            for (i = 0; i < dpds_table_size; ++i) {
                dpds_table[i] = avio_rl32(pb);
                size -= 4;
            }
        }
        avio_skip(pb, size);
    }

    /* Determine overall data length */
    if (size < 0) {
        ret = AVERROR_INVALIDDATA;
        goto fail;
    }
    if (!size) {
        xwma->data_end = INT64_MAX;
    } else
        xwma->data_end = avio_tell(pb) + size;


    if (dpds_table && dpds_table_size) {
        int64_t cur_pos;
        const uint32_t bytes_per_sample
                = (st->codec->channels * st->codec->bits_per_coded_sample) >> 3;

        /* Estimate the duration from the total number of output bytes. */
        const uint64_t total_decoded_bytes = dpds_table[dpds_table_size - 1];

        if (!bytes_per_sample) {
            av_log(s, AV_LOG_ERROR,
                   "Invalid bits_per_coded_sample %d for %d channels\n",
                   st->codec->bits_per_coded_sample, st->codec->channels);
            ret = AVERROR_INVALIDDATA;
            goto fail;
        }

        st->duration = total_decoded_bytes / bytes_per_sample;

        /* Use the dpds data to build a seek table.  We can only do this after
         * we know the offset to the data chunk, as we need that to determine
         * the actual offset to each input block.
         * Note: If we allowed ourselves to assume that the data chunk always
         * follows immediately after the dpds block, we could of course guess
         * the data block's start offset already while reading the dpds chunk.
         * I decided against that, just in case other chunks ever are
         * discovered.
         */
        cur_pos = avio_tell(pb);
        for (i = 0; i < dpds_table_size; ++i) {
            /* From the number of output bytes that would accumulate in the
             * output buffer after decoding the first (i+1) packets, we compute
             * an offset / timestamp pair.
             */
            av_add_index_entry(st,
                               cur_pos + (i+1) * st->codec->block_align, /* pos */
                               dpds_table[i] / bytes_per_sample,         /* timestamp */
                               st->codec->block_align,                   /* size */
                               0,                                        /* duration */
                               AVINDEX_KEYFRAME);
        }
    } else if (st->codec->bit_rate) {
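For the seek index built above, each dpds entry is a cumulative count of decoded output bytes, so a packet's timestamp is that count divided by bytes_per_sample, while its file position advances by one block_align per packet from the position recorded in cur_pos. A minimal sketch of that pair of calculations (the helper and its parameter names are illustrative, not part of the demuxer):

#include <stdint.h>

/* Position/timestamp pair for the (i+1)-th xWMA packet, following the
 * indexing loop above: dpds_table[i] is the cumulative number of decoded
 * bytes and bytes_per_sample = channels * bits_per_coded_sample / 8.
 * data_start mirrors cur_pos in the loop above. */
static void xwma_index_entry(int64_t data_start, int block_align,
                             const uint32_t *dpds_table, int i,
                             uint32_t bytes_per_sample,
                             int64_t *pos, int64_t *timestamp)
{
    *pos       = data_start + (int64_t)(i + 1) * block_align;
    *timestamp = dpds_table[i] / bytes_per_sample;
}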
Example #17
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame,
                        AVPacket *avpkt)
{
    AnmContext *s = avctx->priv_data;
    const int buf_size = avpkt->size;
    uint8_t *dst, *dst_end;
    int count, ret;

    if ((ret = ff_reget_buffer(avctx, s->frame)) < 0)
        return ret;
    dst     = s->frame->data[0];
    dst_end = s->frame->data[0] + s->frame->linesize[0]*avctx->height;

    bytestream2_init(&s->gb, avpkt->data, buf_size);

    if (bytestream2_get_byte(&s->gb) != 0x42) {
        avpriv_request_sample(avctx, "Unknown record type");
        return AVERROR_INVALIDDATA;
    }
    if (bytestream2_get_byte(&s->gb)) {
        avpriv_request_sample(avctx, "Padding bytes");
        return AVERROR_PATCHWELCOME;
    }
    bytestream2_skip(&s->gb, 2);

    s->x = 0;
    do {
        /* if statements are ordered by probability */
#define OP(gb, pixel, count) \
    op(&dst, dst_end, (gb), (pixel), (count), &s->x, avctx->width, s->frame->linesize[0])

        int type = bytestream2_get_byte(&s->gb);
        count = type & 0x7F;
        type >>= 7;
        if (count) {
            if (OP(type ? NULL : &s->gb, -1, count)) break;
        } else if (!type) {
            int pixel;
            count = bytestream2_get_byte(&s->gb);  /* count==0 gives nop */
            pixel = bytestream2_get_byte(&s->gb);
            if (OP(NULL, pixel, count)) break;
        } else {
            int pixel;
            type = bytestream2_get_le16(&s->gb);
            count = type & 0x3FFF;
            type >>= 14;
            if (!count) {
                if (type == 0)
                    break; // stop
                if (type == 2) {
                    avpriv_request_sample(avctx, "Unknown opcode");
                    return AVERROR_PATCHWELCOME;
                }
                continue;
            }
            pixel = type == 3 ? bytestream2_get_byte(&s->gb) : -1;
            if (type == 1) count += 0x4000;
            if (OP(type == 2 ? &s->gb : NULL, pixel, count)) break;
        }
    } while (bytestream2_get_bytes_left(&s->gb) > 0);

    memcpy(s->frame->data[1], s->palette, AVPALETTE_SIZE);

    *got_frame = 1;
    if ((ret = av_frame_ref(data, s->frame)) < 0)
        return ret;

    return buf_size;
}
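The opcode scheme in the loop above packs runs tightly: the first byte carries a 7-bit count and a type bit, and a zero count escapes either to a (count, pixel) fill or, when the type bit is set, to a 16-bit word. A sketch of how that escape word splits, with the operation semantics noted in comments (this helper is illustrative, not part of the decoder):

/* Split the 16-bit escape word used by decode_frame() above: the low
 * 14 bits are a run length and the top two bits pick the operation.
 * With a non-zero count, types 0 and 1 skip pixels (type 1 extending
 * the count by 0x4000), type 2 copies pixels from the stream and
 * type 3 fills with the next byte. With a zero count, type 0 stops,
 * type 2 is an unknown opcode and the rest are no-ops. */
static void anm_split_escape(unsigned word, int *type, int *count)
{
    *count = word & 0x3FFF;
    *type  = word >> 14;
    if (*type == 1 && *count)
        *count += 0x4000;
}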