Example #1
0
static int msnwc_tcp_probe(AVProbeData *p)
{
    int i;

    for(i = 0 ; i + HEADER_SIZE <= p->buf_size ; i++) {
        uint16_t width, height;
        uint32_t fourcc;
        const uint8_t *bytestream = p->buf+i;

        if(bytestream_get_le16(&bytestream) != HEADER_SIZE)
            continue;
        width  = bytestream_get_le16(&bytestream);
        height = bytestream_get_le16(&bytestream);
        if(!(width==320 && height==240) && !(width==160 && height==120))
            continue;
        bytestream += 2; // keyframe
        bytestream += 4; // size
        fourcc = bytestream_get_le32(&bytestream);
        if(fourcc != MKTAG('M', 'L', '2', '0'))
            continue;

        if(i) {
            if(i < 14)  /* starts with SwitchBoard connection info */
                return AVPROBE_SCORE_MAX / 2;
            else        /* starts in the middle of stream */
                return AVPROBE_SCORE_MAX / 3;
        } else {
            return AVPROBE_SCORE_MAX;
        }
    }

    return -1;
}
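
The bytestream_get_le16()/bytestream_get_le32() readers used throughout these examples come from libavcodec's bytestream API: each call reads a little-endian value at the current position and advances the caller's pointer past it. As a rough, stand-alone illustration of that contract (names and code below are ours, not the library implementation):

#include <stdint.h>

/* Illustrative re-implementation of the little-endian readers used above:
 * read a value at **b and advance *b past it. */
static unsigned sketch_get_le16(const uint8_t **b)
{
    unsigned v = (*b)[0] | ((*b)[1] << 8);
    *b += 2;
    return v;
}

static unsigned sketch_get_le32(const uint8_t **b)
{
    unsigned lo = sketch_get_le16(b);
    return lo | (sketch_get_le16(b) << 16);
}

Read this way, the fourcc check above compares the bytes 'M','L','2','0' in stream order against MKTAG('M', 'L', '2', '0'), which packs its arguments in the same little-endian order.
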
Example #2
0
static int gif_read_header1(GifState *s)
{
    uint8_t sig[6];
    int v, n;
    int background_color_index;

    if (s->bytestream_end < s->bytestream + 13)
        return AVERROR_INVALIDDATA;

    /* read gif signature */
    bytestream_get_buffer(&s->bytestream, sig, 6);
    if (memcmp(sig, gif87a_sig, 6) != 0 &&
        memcmp(sig, gif89a_sig, 6) != 0)
        return AVERROR_INVALIDDATA;

    /* read screen header */
    s->transparent_color_index = -1;
    s->screen_width = bytestream_get_le16(&s->bytestream);
    s->screen_height = bytestream_get_le16(&s->bytestream);
    if(   (unsigned)s->screen_width  > 32767
       || (unsigned)s->screen_height > 32767){
        av_log(s->avctx, AV_LOG_ERROR, "picture size too large\n");
        return AVERROR_INVALIDDATA;
    }

    av_fast_malloc(&s->idx_line, &s->idx_line_size, s->screen_width);
    if (!s->idx_line)
        return AVERROR(ENOMEM);

    v = bytestream_get_byte(&s->bytestream);
    s->color_resolution = ((v & 0x70) >> 4) + 1;
    s->has_global_palette = (v & 0x80);
    s->bits_per_pixel = (v & 0x07) + 1;
    background_color_index = bytestream_get_byte(&s->bytestream);
    n = bytestream_get_byte(&s->bytestream);
    if (n) {
        s->avctx->sample_aspect_ratio.num = n + 15;
        s->avctx->sample_aspect_ratio.den = 64;
    }

    av_dlog(s->avctx, "screen_w=%d screen_h=%d bpp=%d global_palette=%d\n",
           s->screen_width, s->screen_height, s->bits_per_pixel,
           s->has_global_palette);

    if (s->has_global_palette) {
        s->background_color_index = background_color_index;
        n = 1 << s->bits_per_pixel;
        if (s->bytestream_end < s->bytestream + n * 3)
            return AVERROR_INVALIDDATA;

        gif_read_palette(&s->bytestream, s->global_palette, n);
        s->bg_color = s->global_palette[s->background_color_index];
    } else
        s->background_color_index = -1;

    return 0;
}
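
For reference, the 13 bytes that gif_read_header1() consumes before the palette are the GIF logical screen descriptor. The sketch below summarizes that layout; the struct is descriptive only (field names are ours, and it is not meant to be read with memcpy because of padding):

/* Layout of the 13-byte GIF header parsed above (descriptive sketch). */
struct gif_screen_descriptor_sketch {
    uint8_t  signature[6];       /* "GIF87a" or "GIF89a"                    */
    uint16_t screen_width;       /* little-endian                           */
    uint16_t screen_height;      /* little-endian                           */
    uint8_t  packed;             /* bit 7: global palette present           */
                                 /* bits 4-6: color resolution - 1          */
                                 /* bits 0-2: global palette bits/pixel - 1 */
    uint8_t  background_color_index;
    uint8_t  pixel_aspect_ratio; /* sample aspect ratio = (n + 15) / 64 when non-zero */
};
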
Example #3
0
static int gif_read_header1(GifState *s)
{
    uint8_t sig[6];
    int v, n;
    int has_global_palette;

    if (s->bytestream_end < s->bytestream + 13)
        return -1;

    /* read gif signature */
    bytestream_get_buffer(&s->bytestream, sig, 6);
    if (memcmp(sig, gif87a_sig, 6) != 0 &&
        memcmp(sig, gif89a_sig, 6) != 0)
        return -1;

    /* read screen header */
    s->transparent_color_index = -1;
    s->screen_width = bytestream_get_le16(&s->bytestream);
    s->screen_height = bytestream_get_le16(&s->bytestream);
    if(   (unsigned)s->screen_width  > 32767
       || (unsigned)s->screen_height > 32767){
        av_log(NULL, AV_LOG_ERROR, "picture size too large\n");
        return -1;
    }

    v = bytestream_get_byte(&s->bytestream);
    s->color_resolution = ((v & 0x70) >> 4) + 1;
    has_global_palette = (v & 0x80);
    s->bits_per_pixel = (v & 0x07) + 1;
    s->background_color_index = bytestream_get_byte(&s->bytestream);
    bytestream_get_byte(&s->bytestream);                /* ignored */
#ifdef DEBUG
    dprintf(s->avctx, "gif: screen_w=%d screen_h=%d bpp=%d global_palette=%d\n",
           s->screen_width, s->screen_height, s->bits_per_pixel,
           has_global_palette);
#endif
    if (has_global_palette) {
        n = 1 << s->bits_per_pixel;
        if (s->bytestream_end < s->bytestream + n * 3)
            return -1;
        bytestream_get_buffer(&s->bytestream, s->global_palette, n * 3);
    }
    return 0;
}
Example #4
0
static int mimic_decode_frame(AVCodecContext *avctx, void *data,
                              int *data_size, const uint8_t *buf, int buf_size)
{
    MimicContext *ctx = avctx->priv_data;
    int is_pframe;
    int width, height;
    int quality, num_coeffs;
    int swap_buf_size = buf_size - MIMIC_HEADER_SIZE;

    if(buf_size < MIMIC_HEADER_SIZE) {
        av_log(avctx, AV_LOG_ERROR, "insufficient data\n");
        return -1;
    }

    buf       += 2; /* some constant (always 256) */
    quality    = bytestream_get_le16(&buf);
    width      = bytestream_get_le16(&buf);
    height     = bytestream_get_le16(&buf);
    buf       += 4; /* some constant */
    is_pframe  = bytestream_get_le32(&buf);
    num_coeffs = bytestream_get_byte(&buf);
    buf       += 3; /* some constant */

    if(!ctx->avctx) {
        int i;

        if(!(width == 160 && height == 120) &&
           !(width == 320 && height == 240)) {
            av_log(avctx, AV_LOG_ERROR, "invalid width/height!\n");
            return -1;
        }

        ctx->avctx     = avctx;
        avctx->width   = width;
        avctx->height  = height;
        avctx->pix_fmt = PIX_FMT_YUV420P;
        for(i = 0; i < 3; i++) {
            ctx->num_vblocks[i] = -((-height) >> (3 + !!i));
            ctx->num_hblocks[i] =     width   >> (3 + !!i) ;
        }
    } else if(width != ctx->avctx->width || height != ctx->avctx->height) {
Example #5
0
static int gif_read_extension(GifState *s)
{
    int ext_code, ext_len, i, gce_flags, gce_transparent_index;

    /* extension */
    ext_code = bytestream_get_byte(&s->bytestream);
    ext_len = bytestream_get_byte(&s->bytestream);
#ifdef DEBUG
    dprintf(s->avctx, "gif: ext_code=0x%x len=%d\n", ext_code, ext_len);
#endif
    switch(ext_code) {
    case 0xf9:
        if (ext_len != 4)
            goto discard_ext;
        s->transparent_color_index = -1;
        gce_flags = bytestream_get_byte(&s->bytestream);
        s->gce_delay = bytestream_get_le16(&s->bytestream);
        gce_transparent_index = bytestream_get_byte(&s->bytestream);
        if (gce_flags & 0x01)
            s->transparent_color_index = gce_transparent_index;
        else
            s->transparent_color_index = -1;
        s->gce_disposal = (gce_flags >> 2) & 0x7;
#ifdef DEBUG
        dprintf(s->avctx, "gif: gce_flags=%x delay=%d tcolor=%d disposal=%d\n",
               gce_flags, s->gce_delay,
               s->transparent_color_index, s->gce_disposal);
#endif
        ext_len = bytestream_get_byte(&s->bytestream);
        break;
    }

    /* NOTE: many extension blocks can come after */
 discard_ext:
    while (ext_len != 0) {
        for (i = 0; i < ext_len; i++)
            bytestream_get_byte(&s->bytestream);
        ext_len = bytestream_get_byte(&s->bytestream);
#ifdef DEBUG
        dprintf(s->avctx, "gif: ext_len1=%d\n", ext_len);
#endif
    }
    return 0;
}
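
A GIF extension body is a chain of length-prefixed sub-blocks terminated by a zero length byte, which is what the discard_ext loop above walks. A minimal sketch of that pattern, with bounds checks omitted (the hardened variant in Example #12 shows where they belong):

/* Skip a chain of GIF sub-blocks: each starts with a length byte,
 * and a length of 0 ends the chain. */
static void skip_gif_subblocks_sketch(const uint8_t **b)
{
    unsigned len = *(*b)++;      /* length of the first sub-block */

    while (len != 0) {
        *b  += len;              /* skip the sub-block payload    */
        len  = *(*b)++;          /* length of the next sub-block  */
    }
}
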
Example #6
0
static int gif_read_image(GifState *s)
{
    int left, top, width, height, bits_per_pixel, code_size, flags;
    int is_interleaved, has_local_palette, y, pass, y1, linesize, n, i;
    uint8_t *ptr, *spal, *palette, *ptr1;

    left = bytestream_get_le16(&s->bytestream);
    top = bytestream_get_le16(&s->bytestream);
    width = bytestream_get_le16(&s->bytestream);
    height = bytestream_get_le16(&s->bytestream);
    flags = bytestream_get_byte(&s->bytestream);
    is_interleaved = flags & 0x40;
    has_local_palette = flags & 0x80;
    bits_per_pixel = (flags & 0x07) + 1;
#ifdef DEBUG
    dprintf(s->avctx, "gif: image x=%d y=%d w=%d h=%d\n", left, top, width, height);
#endif

    if (has_local_palette) {
        bytestream_get_buffer(&s->bytestream, s->local_palette, 3 * (1 << bits_per_pixel));
        palette = s->local_palette;
    } else {
        palette = s->global_palette;
        bits_per_pixel = s->bits_per_pixel;
    }

    /* verify that all the image is inside the screen dimensions */
    if (left + width > s->screen_width ||
        top + height > s->screen_height)
        return AVERROR(EINVAL);

    /* build the palette */
    n = (1 << bits_per_pixel);
    spal = palette;
    for(i = 0; i < n; i++) {
        s->image_palette[i] = (0xff << 24) | AV_RB24(spal);
        spal += 3;
    }
    for(; i < 256; i++)
        s->image_palette[i] = (0xff << 24);
    /* handle transparency */
    if (s->transparent_color_index >= 0)
        s->image_palette[s->transparent_color_index] = 0;

    /* now get the image data */
    code_size = bytestream_get_byte(&s->bytestream);
    ff_lzw_decode_init(s->lzw, code_size, s->bytestream,
                       s->bytestream_end - s->bytestream, FF_LZW_GIF);

    /* read all the image */
    linesize = s->picture.linesize[0];
    ptr1 = s->picture.data[0] + top * linesize + left;
    ptr = ptr1;
    pass = 0;
    y1 = 0;
    for (y = 0; y < height; y++) {
        ff_lzw_decode(s->lzw, ptr, width);
        if (is_interleaved) {
            switch(pass) {
            default:
            case 0:
            case 1:
                y1 += 8;
                ptr += linesize * 8;
                if (y1 >= height) {
                    y1 = pass ? 2 : 4;
                    ptr = ptr1 + linesize * y1;
                    pass++;
                }
                break;
            case 2:
                y1 += 4;
                ptr += linesize * 4;
                if (y1 >= height) {
                    y1 = 1;
                    ptr = ptr1 + linesize;
                    pass++;
                }
                break;
            case 3:
                y1 += 2;
                ptr += linesize * 2;
                break;
            }
        } else {
            ptr += linesize;
        }
    }
    /* read the garbage data until end marker is found */
    ff_lzw_decode_tail(s->lzw);
    s->bytestream = ff_lzw_cur_ptr(s->lzw);
    return 0;
}
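
The pass/y1 bookkeeping in the interleaved branch above implements standard GIF interlacing: rows are stored in four passes rather than top to bottom. The small sketch below (function name is ours) lists the display rows in the order they are decoded, which is exactly the order the code above walks ptr through the picture:

/* GIF interlace order: pass 1 covers rows 0, 8, 16, ...; pass 2 rows
 * 4, 12, ...; pass 3 rows 2, 6, 10, ...; pass 4 the odd rows. */
static int gif_interlace_rows_sketch(int height, int *rows)
{
    static const int start[4] = { 0, 4, 2, 1 };
    static const int step[4]  = { 8, 8, 4, 2 };
    int pass, y, n = 0;

    for (pass = 0; pass < 4; pass++)
        for (y = start[pass]; y < height; y += step[pass])
            rows[n++] = y;
    return n;    /* always equals height */
}
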
Example #7
0
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size,
                        const uint8_t *buf, int buf_size) {
    AVSubtitle *sub = data;
    const uint8_t *buf_end = buf + buf_size;
    uint8_t *bitmap;
    int w, h, x, y, rlelen, i;
    GetBitContext gb;

    // check that at least header fits
    if (buf_size < 27 + 7 * 2 + 4 * 3) {
        av_log(avctx, AV_LOG_ERROR, "coded frame too small\n");
        return -1;
    }

    // read start and end time
    if (buf[0] != '[' || buf[13] != '-' || buf[26] != ']') {
        av_log(avctx, AV_LOG_ERROR, "invalid time code\n");
        return -1;
    }
    sub->start_display_time = parse_timecode(buf +  1);
    sub->end_display_time   = parse_timecode(buf + 14);
    buf += 27;

    // read header
    w = bytestream_get_le16(&buf);
    h = bytestream_get_le16(&buf);
    if (avcodec_check_dimensions(avctx, w, h) < 0)
        return -1;
    x = bytestream_get_le16(&buf);
    y = bytestream_get_le16(&buf);
    // skip bottom right position, it gives no new information
    bytestream_get_le16(&buf);
    bytestream_get_le16(&buf);
    rlelen = bytestream_get_le16(&buf);

    // allocate sub and set values
    if (!sub->rects) {
        sub->rects = av_mallocz(sizeof(AVSubtitleRect));
        sub->num_rects = 1;
    }
    av_freep(&sub->rects[0].bitmap);
    sub->rects[0].x = x; sub->rects[0].y = y;
    sub->rects[0].w = w; sub->rects[0].h = h;
    sub->rects[0].linesize = w;
    sub->rects[0].bitmap = av_malloc(w * h);
    sub->rects[0].nb_colors = 4;
    sub->rects[0].rgba_palette = av_malloc(sub->rects[0].nb_colors * 4);

    // read palette
    for (i = 0; i < sub->rects[0].nb_colors; i++)
        sub->rects[0].rgba_palette[i] = bytestream_get_be24(&buf);
    // make all except background (first entry) non-transparent
    for (i = 1; i < sub->rects[0].nb_colors; i++)
        sub->rects[0].rgba_palette[i] |= 0xff000000;

    // process RLE-compressed data
    rlelen = FFMIN(rlelen, buf_end - buf);
    init_get_bits(&gb, buf, rlelen * 8);
    bitmap = sub->rects[0].bitmap;
    for (y = 0; y < h; y++) {
        // interlaced: do odd lines
        if (y == (h + 1) / 2) bitmap = sub->rects[0].bitmap + w;
        for (x = 0; x < w; ) {
            int log2 = ff_log2_tab[show_bits(&gb, 8)];
            int run = get_bits(&gb, 14 - 4 * (log2 >> 1));
            int color = get_bits(&gb, 2);
            run = FFMIN(run, w - x);
            // run length 0 means till end of row
            if (!run) run = w - x;
            memset(bitmap, color, run);
            bitmap += run;
            x += run;
        }
        // interlaced, skip every second line
        bitmap += w;
        align_get_bits(&gb);
    }
    *data_size = 1;
    return buf_size;
}
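
The 27-byte header validated above matches a textual "[HH:MM:SS.mmm-HH:MM:SS.mmm]" timecode pair; parse_timecode() itself is outside this excerpt. A hypothetical sketch of such a helper, assuming the trailing field is milliseconds:

#include <stdio.h>
#include <stdint.h>

/* Parse one "HH:MM:SS.mmm" timecode and return it in milliseconds
 * (this sketch returns 0 on malformed input). */
static int64_t parse_timecode_sketch(const uint8_t *p)
{
    int hh, mm, ss, ms;

    if (sscanf((const char *)p, "%2d:%2d:%2d.%3d", &hh, &mm, &ss, &ms) != 4)
        return 0;
    return ((int64_t)hh * 3600 + mm * 60 + ss) * 1000 + ms;
}
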
Example #8
0
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *data_size,
                        AVPacket *avpkt)
{
    AnmContext *s = avctx->priv_data;
    const uint8_t *buf = avpkt->data;
    const int buf_size = avpkt->size;
    const uint8_t *buf_end = buf + buf_size;
    uint8_t *dst, *dst_end;
    int count;

    if(avctx->reget_buffer(avctx, &s->frame) < 0){
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }
    dst     = s->frame.data[0];
    dst_end = s->frame.data[0] + s->frame.linesize[0]*avctx->height;

    if (buf[0] != 0x42) {
        av_log_ask_for_sample(avctx, "unknown record type\n");
        return buf_size;
    }
    if (buf[1]) {
        av_log_ask_for_sample(avctx, "padding bytes not supported\n");
        return buf_size;
    }
    buf += 4;

    s->x = 0;
    do {
        /* if statements are ordered by probability */
#define OP(buf, pixel, count) \
    op(&dst, dst_end, (buf), buf_end, (pixel), (count), &s->x, avctx->width, s->frame.linesize[0])

        int type = bytestream_get_byte(&buf);
        count = type & 0x7F;
        type >>= 7;
        if (count) {
            if (OP(type ? NULL : &buf, -1, count)) break;
        } else if (!type) {
            int pixel;
            count = bytestream_get_byte(&buf);  /* count==0 gives nop */
            pixel = bytestream_get_byte(&buf);
            if (OP(NULL, pixel, count)) break;
        } else {
            int pixel;
            type = bytestream_get_le16(&buf);
            count = type & 0x3FFF;
            type >>= 14;
            if (!count) {
                if (type == 0)
                    break; // stop
                if (type == 2) {
                    av_log_ask_for_sample(avctx, "unknown opcode");
                    return AVERROR_INVALIDDATA;
                }
                continue;
            }
            pixel = type == 3 ? bytestream_get_byte(&buf) : -1;
            if (type == 1) count += 0x4000;
            if (OP(type == 2 ? &buf : NULL, pixel, count)) break;
        }
    } while (buf + 1 < buf_end);

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->frame;
    return buf_size;
}
Example #9
0
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *data_size,
                        AVPacket *avpkt)
{
    PicContext *s = avctx->priv_data;
    int buf_size = avpkt->size;
    const uint8_t *buf = avpkt->data;
    const uint8_t *buf_end = avpkt->data + buf_size;
    uint32_t *palette;
    int bits_per_plane, bpp, etype, esize, npal;
    int i, x, y, plane;

    if (buf_size < 11)
        return AVERROR_INVALIDDATA;

    if (bytestream_get_le16(&buf) != 0x1234)
        return AVERROR_INVALIDDATA;
    s->width  = bytestream_get_le16(&buf);
    s->height = bytestream_get_le16(&buf);
    buf += 4;
    bits_per_plane    = *buf & 0xF;
    s->nb_planes      = (*buf++ >> 4) + 1;
    bpp               = s->nb_planes ? bits_per_plane*s->nb_planes : bits_per_plane;
    if (bits_per_plane > 8 || bpp < 1 || bpp > 32) {
        av_log_ask_for_sample(avctx, "unsupported bit depth\n");
        return AVERROR_INVALIDDATA;
    }

    if (*buf == 0xFF) {
        buf += 2;
        etype  = bytestream_get_le16(&buf);
        esize  = bytestream_get_le16(&buf);
        if (buf_end - buf < esize)
            return AVERROR_INVALIDDATA;
    } else {
        etype = -1;
        esize = 0;
    }

    avctx->pix_fmt = PIX_FMT_PAL8;

    if (s->width != avctx->width && s->height != avctx->height) {
        if (av_image_check_size(s->width, s->height, 0, avctx) < 0)
            return -1;
        avcodec_set_dimensions(avctx, s->width, s->height);
        if (s->frame.data[0])
            avctx->release_buffer(avctx, &s->frame);
    }

    if (avctx->get_buffer(avctx, &s->frame) < 0){
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }
    memset(s->frame.data[0], 0, s->height * s->frame.linesize[0]);
    s->frame.pict_type           = FF_I_TYPE;
    s->frame.palette_has_changed = 1;

    palette = (uint32_t*)s->frame.data[1];
    if (etype == 1 && esize > 1 && *buf < 6) {
        int idx = *buf;
        npal = 4;
        for (i = 0; i < npal; i++)
            palette[i] = ff_cga_palette[ cga_mode45_index[idx][i] ];
    } else if (etype == 2) {
        npal = FFMIN(esize, 16);
        for (i = 0; i < npal; i++)
            palette[i] = ff_cga_palette[ FFMIN(buf[i], 16)];
    } else if (etype == 3) {
        npal = FFMIN(esize, 16);
        for (i = 0; i < npal; i++)
            palette[i] = ff_ega_palette[ FFMIN(buf[i], 63)];
    } else if (etype == 4 || etype == 5) {
        npal = FFMIN(esize / 3, 256);
        for (i = 0; i < npal; i++)
            palette[i] = AV_RB24(buf + i*3) << 2;
    } else {
        if (bpp == 1) {
            npal = 2;
            palette[0] = 0x000000;
            palette[1] = 0xFFFFFF;
        } else if (bpp == 2) {
            npal = 4;
            for (i = 0; i < npal; i++)
                palette[i] = ff_cga_palette[ cga_mode45_index[0][i] ];
        } else {
            npal = 16;
            memcpy(palette, ff_cga_palette, npal * 4);
        }
    }
    // fill remaining palette entries
    memset(palette + npal, 0, AVPALETTE_SIZE - npal * 4);
    buf += esize;


    x = 0;
    y = s->height - 1;
    plane = 0;
    if (bytestream_get_le16(&buf)) {
        while (buf_end - buf >= 6) {
            const uint8_t *buf_pend = buf + FFMIN(AV_RL16(buf), buf_end - buf);
            //ignore uncompressed block size reported at buf[2]
            int marker = buf[4];
            buf += 5;

            while (plane < s->nb_planes && buf_pend - buf >= 1) {
                int run = 1;
                int val = *buf++;
                if (val == marker) {
                    run = *buf++;
                    if (run == 0)
                        run = bytestream_get_le16(&buf);
                    val = *buf++;
                }
                if (buf > buf_end)
                    break;

                if (bits_per_plane == 8) {
                    picmemset_8bpp(s, val, run, &x, &y);
                    if (y < 0)
                        break;
                } else {
                    picmemset(s, val, run, &x, &y, &plane, bits_per_plane);
                }
            }
        }
    } else {
        av_log_ask_for_sample(avctx, "uncompressed image\n");
        return buf_size;
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->frame;
    return buf_size;
}
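
The compressed blocks above use a marker-escaped run-length scheme: a plain byte is a literal value, the marker byte introduces a run, a run length of 0 means a 16-bit run length follows, and the run's value comes last. An illustrative expansion of that scheme into a flat buffer (bounds handling simplified, names ours):

/* Expand marker-escaped RLE data into dst; returns the number of
 * bytes written. */
static int rle_expand_sketch(const uint8_t *src, int src_len, int marker,
                             uint8_t *dst, int dst_len)
{
    int si = 0, di = 0;

    while (si < src_len && di < dst_len) {
        int run = 1, val = src[si++];

        if (val == marker) {
            if (si >= src_len)
                break;
            run = src[si++];
            if (run == 0) {                          /* extended 16-bit run */
                if (si + 2 > src_len)
                    break;
                run = src[si] | (src[si + 1] << 8);
                si += 2;
            }
            if (si >= src_len)
                break;
            val = src[si++];
        }
        while (run-- > 0 && di < dst_len)
            dst[di++] = (uint8_t)val;
    }
    return di;
}
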
Example #10
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size,
                        AVPacket *avpkt) {
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    AVSubtitle *sub = data;
    const uint8_t *buf_end = buf + buf_size;
    uint8_t *bitmap;
    int w, h, x, y, rlelen, i;
    int64_t packet_time = 0;
    GetBitContext gb;

    memset(sub, 0, sizeof(*sub));

    // check that at least header fits
    if (buf_size < 27 + 7 * 2 + 4 * 3) {
        av_log(avctx, AV_LOG_ERROR, "coded frame too small\n");
        return -1;
    }

    // read start and end time
    if (buf[0] != '[' || buf[13] != '-' || buf[26] != ']') {
        av_log(avctx, AV_LOG_ERROR, "invalid time code\n");
        return -1;
    }
    if (avpkt->pts != AV_NOPTS_VALUE)
        packet_time = av_rescale_q(avpkt->pts, AV_TIME_BASE_Q, (AVRational){1, 1000});
    sub->start_display_time = parse_timecode(buf +  1, packet_time);
    sub->end_display_time   = parse_timecode(buf + 14, packet_time);
    buf += 27;

    // read header
    w = bytestream_get_le16(&buf);
    h = bytestream_get_le16(&buf);
    if (avcodec_check_dimensions(avctx, w, h) < 0)
        return -1;
    x = bytestream_get_le16(&buf);
    y = bytestream_get_le16(&buf);

#ifdef SUPPORT_DIVX_DRM
    if ((video_height - (y + h)) > 30)
        y = video_height - 30 - h - 1;
#endif /* end of SUPPORT_DIVX_DRM */

    // skip bottom right position, it gives no new information
    bytestream_get_le16(&buf);
    bytestream_get_le16(&buf);
    rlelen = bytestream_get_le16(&buf);

    // allocate sub and set values
    sub->rects =  av_mallocz(sizeof(*sub->rects));
    sub->rects[0] = av_mallocz(sizeof(*sub->rects[0]));
    sub->num_rects = 1;
    sub->rects[0]->x = x; sub->rects[0]->y = y;
    sub->rects[0]->w = w; sub->rects[0]->h = h;
    sub->rects[0]->type = SUBTITLE_BITMAP;
    sub->rects[0]->pict.linesize[0] = w;
    sub->rects[0]->pict.data[0] = av_malloc(w * h);
    sub->rects[0]->nb_colors = 4;
    sub->rects[0]->pict.data[1] = av_mallocz(AVPALETTE_SIZE);

    // read palette
    for (i = 0; i < sub->rects[0]->nb_colors; i++)
        ((uint32_t*)sub->rects[0]->pict.data[1])[i] = bytestream_get_be24(&buf);
    // make all except background (first entry) non-transparent
#if 1
    if (sub_type == 2) {    // DXSA
        for (i = 0; i < sub->rects[0]->nb_colors; i++) {
            if (buf[i])
                ((uint32_t*)sub->rects[0]->pict.data[1])[i] |= 0xff000000;
        }
        if (buf[0] == buf[1] && buf[0] == buf[2] && buf[0] == buf[3] &&
            buf[1] == buf[2] && buf[1] == buf[3] && buf[2] == buf[3] &&
            buf[0] < 0xff) {
            transport_float = (float)buf[0] / 256.0;
        }
        buf += 4;
    } else {
        for (i = 1; i < sub->rects[0]->nb_colors; i++)
            ((uint32_t*)sub->rects[0]->pict.data[1])[i] |= 0xff000000;
    }
#else
    for (i = 1; i < sub->rects[0]->nb_colors; i++)
        ((uint32_t*)sub->rects[0]->pict.data[1])[i] |= 0xff000000;
#endif

    // process RLE-compressed data
    rlelen = FFMIN(rlelen, buf_end - buf);
    init_get_bits(&gb, buf, rlelen * 8);
    bitmap = sub->rects[0]->pict.data[0];
    for (y = 0; y < h; y++) {
        // interlaced: do odd lines
        if (y == (h + 1) / 2) bitmap = sub->rects[0]->pict.data[0] + w;
        for (x = 0; x < w; ) {
            int log2 = ff_log2_tab[show_bits(&gb, 8)];
            int run = get_bits(&gb, 14 - 4 * (log2 >> 1));
            int color = get_bits(&gb, 2);
            run = FFMIN(run, w - x);
            // run length 0 means till end of row
            if (!run) run = w - x;
            memset(bitmap, color, run);
            bitmap += run;
            x += run;
        }
        // interlaced, skip every second line
        bitmap += w;
        align_get_bits(&gb);
    }
    *data_size = 1;
    return buf_size;
}
Example #11
0
static int bethsoftvid_decode_frame(AVCodecContext *avctx,
                              void *data, int *data_size,
                              AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    BethsoftvidContext * vid = avctx->priv_data;
    char block_type;
    uint8_t * dst;
    uint8_t * frame_end;
    int remaining = avctx->width;          // number of bytes remaining on a line
    const int wrap_to_next_line = vid->frame.linesize[0] - avctx->width;
    int code;
    int yoffset;

    if (avctx->reget_buffer(avctx, &vid->frame)) {
        av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
        return -1;
    }
    dst = vid->frame.data[0];
    frame_end = vid->frame.data[0] + vid->frame.linesize[0] * avctx->height;

    switch(block_type = *buf++){
        case PALETTE_BLOCK:
            set_palette(&vid->frame, buf);
            return 0;
        case VIDEO_YOFF_P_FRAME:
            yoffset = bytestream_get_le16(&buf);
            if(yoffset >= avctx->height)
                return -1;
            dst += vid->frame.linesize[0] * yoffset;
    }

    // main code
    while((code = *buf++)){
        int length = code & 0x7f;

        // copy any bytes starting at the current position, and ending at the frame width
        while(length > remaining){
            if(code < 0x80)
                bytestream_get_buffer(&buf, dst, remaining);
            else if(block_type == VIDEO_I_FRAME)
                memset(dst, buf[0], remaining);
            length -= remaining;      // decrement the number of bytes to be copied
            dst += remaining + wrap_to_next_line;    // skip over extra bytes at end of frame
            remaining = avctx->width;
            if(dst == frame_end)
                goto end;
        }

        // copy any remaining bytes after / if line overflows
        if(code < 0x80)
            bytestream_get_buffer(&buf, dst, length);
        else if(block_type == VIDEO_I_FRAME)
            memset(dst, *buf++, length);
        remaining -= length;
        dst += length;
    }
    end:

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = vid->frame;

    return buf_size;
}
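
Each code byte consumed by the main loop above packs a length in its low 7 bits: if the high bit is set the next byte is replicated for that length (for I-frames), otherwise that many literal bytes follow, and a zero code byte ends the stream. A simplified sketch of that opcode format, ignoring the line wrapping and P-frame skipping handled by the real decoder:

#include <stdint.h>
#include <string.h>

/* Expand the high-bit fill / literal-copy opcode stream into dst;
 * returns the number of bytes produced. */
static int vid_rle_expand_sketch(const uint8_t *src, const uint8_t *src_end,
                                 uint8_t *dst, int dst_len)
{
    int di = 0, code;

    while (src < src_end && (code = *src++)) {
        int length = code & 0x7f;

        if (length > dst_len - di)
            length = dst_len - di;
        if (code < 0x80) {                /* literal run */
            if (src_end - src < (code & 0x7f))
                break;
            memcpy(dst + di, src, length);
            src += code & 0x7f;
        } else {                          /* fill run with the next byte */
            if (src >= src_end)
                break;
            memset(dst + di, *src++, length);
        }
        di += length;
        if (di >= dst_len)
            break;
    }
    return di;
}
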
Example #12
0
static int gif_read_extension(GifState *s)
{
    int ext_code, ext_len, i, gce_flags, gce_transparent_index;

    /* There must be at least 2 bytes:
     * 1 for extension label and 1 for extension length. */
    if (s->bytestream_end < s->bytestream + 2)
        return AVERROR_INVALIDDATA;

    ext_code = bytestream_get_byte(&s->bytestream);
    ext_len = bytestream_get_byte(&s->bytestream);

    av_dlog(s->avctx, "ext_code=0x%x len=%d\n", ext_code, ext_len);

    switch(ext_code) {
    case GIF_GCE_EXT_LABEL:
        if (ext_len != 4)
            goto discard_ext;

        /* We need at least 5 bytes more: 4 is for extension body
         * and 1 for next block size. */
        if (s->bytestream_end < s->bytestream + 5)
            return AVERROR_INVALIDDATA;

        s->transparent_color_index = -1;
        gce_flags = bytestream_get_byte(&s->bytestream);
        bytestream_get_le16(&s->bytestream);    // delay during which the frame is shown
        gce_transparent_index = bytestream_get_byte(&s->bytestream);
        if (gce_flags & 0x01)
            s->transparent_color_index = gce_transparent_index;
        else
            s->transparent_color_index = -1;
        s->gce_disposal = (gce_flags >> 2) & 0x7;

        av_dlog(s->avctx, "gce_flags=%x tcolor=%d disposal=%d\n",
               gce_flags,
               s->transparent_color_index, s->gce_disposal);

        if (s->gce_disposal > 3) {
            s->gce_disposal = GCE_DISPOSAL_NONE;
            av_dlog(s->avctx, "invalid value in gce_disposal (%d). Using default value of 0.\n", ext_len);
        }

        ext_len = bytestream_get_byte(&s->bytestream);
        break;
    }

    /* NOTE: many extension blocks can come after */
 discard_ext:
    while (ext_len != 0) {
        /* There must be at least ext_len bytes and 1 for next block size byte. */
        if (s->bytestream_end < s->bytestream + ext_len + 1)
            return AVERROR_INVALIDDATA;

        for (i = 0; i < ext_len; i++)
            bytestream_get_byte(&s->bytestream);
        ext_len = bytestream_get_byte(&s->bytestream);

        av_dlog(s->avctx, "ext_len1=%d\n", ext_len);
    }
    return 0;
}
Example #13
0
static int gif_read_image(GifState *s)
{
    int left, top, width, height, bits_per_pixel, code_size, flags;
    int is_interleaved, has_local_palette, y, pass, y1, linesize, pal_size;
    uint32_t *ptr, *pal, *px, *pr, *ptr1;
    int ret;
    uint8_t *idx;

    /* At least 9 bytes of Image Descriptor. */
    if (s->bytestream_end < s->bytestream + 9)
        return AVERROR_INVALIDDATA;

    left = bytestream_get_le16(&s->bytestream);
    top = bytestream_get_le16(&s->bytestream);
    width = bytestream_get_le16(&s->bytestream);
    height = bytestream_get_le16(&s->bytestream);
    flags = bytestream_get_byte(&s->bytestream);
    is_interleaved = flags & 0x40;
    has_local_palette = flags & 0x80;
    bits_per_pixel = (flags & 0x07) + 1;

    av_dlog(s->avctx, "image x=%d y=%d w=%d h=%d\n", left, top, width, height);

    if (has_local_palette) {
        pal_size = 1 << bits_per_pixel;

        if (s->bytestream_end < s->bytestream + pal_size * 3)
            return AVERROR_INVALIDDATA;

        gif_read_palette(&s->bytestream, s->local_palette, pal_size);
        pal = s->local_palette;
    } else {
        if (!s->has_global_palette) {
            av_log(s->avctx, AV_LOG_FATAL, "picture doesn't have either global or local palette.\n");
            return AVERROR_INVALIDDATA;
        }

        pal = s->global_palette;
    }

    if (s->keyframe) {
        if (s->transparent_color_index == -1 && s->has_global_palette) {
            /* transparency wasn't set before the first frame, fill with background color */
            gif_fill(&s->picture, s->bg_color);
        } else {
            /* otherwise fill with transparent color.
             * this is necessary since by default picture filled with 0x80808080. */
            gif_fill(&s->picture, s->trans_color);
        }
    }

    /* verify that all the image is inside the screen dimensions */
    if (left + width > s->screen_width ||
        top + height > s->screen_height)
        return AVERROR(EINVAL);

    /* process disposal method */
    if (s->gce_prev_disposal == GCE_DISPOSAL_BACKGROUND) {
        gif_fill_rect(&s->picture, s->stored_bg_color, s->gce_l, s->gce_t, s->gce_w, s->gce_h);
    } else if (s->gce_prev_disposal == GCE_DISPOSAL_RESTORE) {
        gif_copy_img_rect(s->stored_img, (uint32_t *)s->picture.data[0],
            s->picture.linesize[0] / sizeof(uint32_t), s->gce_l, s->gce_t, s->gce_w, s->gce_h);
    }

    s->gce_prev_disposal = s->gce_disposal;

    if (s->gce_disposal != GCE_DISPOSAL_NONE) {
        s->gce_l = left;  s->gce_t = top;
        s->gce_w = width; s->gce_h = height;

        if (s->gce_disposal == GCE_DISPOSAL_BACKGROUND) {
            if (s->background_color_index == s->transparent_color_index)
                s->stored_bg_color = s->trans_color;
            else
                s->stored_bg_color = s->bg_color;
        } else if (s->gce_disposal == GCE_DISPOSAL_RESTORE) {
            av_fast_malloc(&s->stored_img, &s->stored_img_size, s->picture.linesize[0] * s->picture.height);
            if (!s->stored_img)
                return AVERROR(ENOMEM);

            gif_copy_img_rect((uint32_t *)s->picture.data[0], s->stored_img,
                s->picture.linesize[0] / sizeof(uint32_t), left, top, width, height);
        }
    }

    /* Expect at least 2 bytes: 1 for lzw code size and 1 for block size. */
    if (s->bytestream_end < s->bytestream + 2)
        return AVERROR_INVALIDDATA;

    /* now get the image data */
    code_size = bytestream_get_byte(&s->bytestream);
    if ((ret = ff_lzw_decode_init(s->lzw, code_size, s->bytestream,
                       s->bytestream_end - s->bytestream, FF_LZW_GIF)) < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "LZW init failed\n");
        return ret;
    }

    /* read all the image */
    linesize = s->picture.linesize[0] / sizeof(uint32_t);
    ptr1 = (uint32_t *)s->picture.data[0] + top * linesize + left;
    ptr = ptr1;
    pass = 0;
    y1 = 0;
    for (y = 0; y < height; y++) {
        if (ff_lzw_decode(s->lzw, s->idx_line, width) == 0)
            goto decode_tail;

        pr = ptr + width;

        for (px = ptr, idx = s->idx_line; px < pr; px++, idx++) {
            if (*idx != s->transparent_color_index)
                *px = pal[*idx];
        }

        if (is_interleaved) {
            switch(pass) {
            default:
            case 0:
            case 1:
                y1 += 8;
                ptr += linesize * 8;
                if (y1 >= height) {
                    y1 = pass ? 2 : 4;
                    ptr = ptr1 + linesize * y1;
                    pass++;
                }
                break;
            case 2:
                y1 += 4;
                ptr += linesize * 4;
                if (y1 >= height) {
                    y1 = 1;
                    ptr = ptr1 + linesize;
                    pass++;
                }
                break;
            case 3:
                y1 += 2;
                ptr += linesize * 2;
                break;
            }
        } else {
            ptr += linesize;
        }
    }

 decode_tail:
    /* read the garbage data until end marker is found */
    ff_lzw_decode_tail(s->lzw);
    s->bytestream = ff_lzw_cur_ptr(s->lzw);

    /* Graphic Control Extension's scope is single frame.
     * Remove its influence. */
    s->transparent_color_index = -1;
    s->gce_disposal = GCE_DISPOSAL_NONE;

    return 0;
}
Example #14
static int dpcm_decode_frame(AVCodecContext *avctx, void *data, int *data_size,
                             AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    const uint8_t *buf_end = buf + buf_size;
    DPCMContext *s = avctx->priv_data;
    int out = 0;
    int predictor[2];
    int ch = 0;
    int stereo = s->channels - 1;
    int16_t *output_samples = data;

    /* calculate output size */
    switch(avctx->codec->id) {
    case CODEC_ID_ROQ_DPCM:
        out = buf_size - 8;
        break;
    case CODEC_ID_INTERPLAY_DPCM:
        out = buf_size - 6 - s->channels;
        break;
    case CODEC_ID_XAN_DPCM:
        out = buf_size - 2 * s->channels;
        break;
    case CODEC_ID_SOL_DPCM:
        if (avctx->codec_tag != 3)
            out = buf_size * 2;
        else
            out = buf_size;
        break;
    }
    out *= av_get_bytes_per_sample(avctx->sample_fmt);
    if (out <= 0) {
        av_log(avctx, AV_LOG_ERROR, "packet is too small\n");
        return AVERROR(EINVAL);
    }
    if (*data_size < out) {
        av_log(avctx, AV_LOG_ERROR, "output buffer is too small\n");
        return AVERROR(EINVAL);
    }

    switch(avctx->codec->id) {

    case CODEC_ID_ROQ_DPCM:
        buf += 6;

        if (stereo) {
            predictor[1] = (int16_t)(bytestream_get_byte(&buf) << 8);
            predictor[0] = (int16_t)(bytestream_get_byte(&buf) << 8);
        } else {
            predictor[0] = (int16_t)bytestream_get_le16(&buf);
        }

        /* decode the samples */
        while (buf < buf_end) {
            predictor[ch] += s->roq_square_array[*buf++];
            predictor[ch]  = av_clip_int16(predictor[ch]);
            *output_samples++ = predictor[ch];

            /* toggle channel */
            ch ^= stereo;
        }
        break;

    case CODEC_ID_INTERPLAY_DPCM:
        buf += 6;  /* skip over the stream mask and stream length */

        for (ch = 0; ch < s->channels; ch++) {
            predictor[ch] = (int16_t)bytestream_get_le16(&buf);
            *output_samples++ = predictor[ch];
        }

        ch = 0;
        while (buf < buf_end) {
            predictor[ch] += interplay_delta_table[*buf++];
            predictor[ch]  = av_clip_int16(predictor[ch]);
            *output_samples++ = predictor[ch];

            /* toggle channel */
            ch ^= stereo;
        }
        break;

    case CODEC_ID_XAN_DPCM:
    {
        int shift[2] = { 4, 4 };

        for (ch = 0; ch < s->channels; ch++)
            predictor[ch] = (int16_t)bytestream_get_le16(&buf);

        ch = 0;
        while (buf < buf_end) {
            uint8_t n = *buf++;
            int16_t diff = (n & 0xFC) << 8;
            if ((n & 0x03) == 3)
                shift[ch]++;
            else
                shift[ch] -= (2 * (n & 3));
            /* saturate the shifter to a lower limit of 0 */
            if (shift[ch] < 0)
                shift[ch] = 0;

            diff >>= shift[ch];
            predictor[ch] += diff;

            predictor[ch] = av_clip_int16(predictor[ch]);
            *output_samples++ = predictor[ch];

            /* toggle channel */
            ch ^= stereo;
        }
        break;
    }
    case CODEC_ID_SOL_DPCM:
        if (avctx->codec_tag != 3) {
            uint8_t *output_samples_u8 = data;
            while (buf < buf_end) {
                uint8_t n = *buf++;

                s->sample[0] += s->sol_table[n >> 4];
                s->sample[0]  = av_clip_uint8(s->sample[0]);
                *output_samples_u8++ = s->sample[0];

                s->sample[stereo] += s->sol_table[n & 0x0F];
                s->sample[stereo]  = av_clip_uint8(s->sample[stereo]);
                *output_samples_u8++ = s->sample[stereo];
            }
        } else {
            while (buf < buf_end) {
Example #15
0
static int decode_frame(AVCodecContext *avctx, void *data,
                            int *data_size, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    C93DecoderContext * const c93 = avctx->priv_data;
    AVFrame * const newpic = &c93->pictures[c93->currentpic];
    AVFrame * const oldpic = &c93->pictures[c93->currentpic^1];
    AVFrame *picture = data;
    uint8_t *out;
    int stride, i, x, y, bt = 0;

    c93->currentpic ^= 1;

    newpic->reference = 1;
    newpic->buffer_hints = FF_BUFFER_HINTS_VALID | FF_BUFFER_HINTS_PRESERVE |
                         FF_BUFFER_HINTS_REUSABLE | FF_BUFFER_HINTS_READABLE;
    if (avctx->reget_buffer(avctx, newpic)) {
        av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
        return -1;
    }

    stride = newpic->linesize[0];

    if (buf[0] & C93_FIRST_FRAME) {
        newpic->pict_type = FF_I_TYPE;
        newpic->key_frame = 1;
    } else {
        newpic->pict_type = FF_P_TYPE;
        newpic->key_frame = 0;
    }

    if (*buf++ & C93_HAS_PALETTE) {
        uint32_t *palette = (uint32_t *) newpic->data[1];
        const uint8_t *palbuf = buf + buf_size - 768 - 1;
        for (i = 0; i < 256; i++) {
            palette[i] = bytestream_get_be24(&palbuf);
        }
    } else {
        if (oldpic->data[1])
            memcpy(newpic->data[1], oldpic->data[1], 256 * 4);
    }

    for (y = 0; y < HEIGHT; y += 8) {
        out = newpic->data[0] + y * stride;
        for (x = 0; x < WIDTH; x += 8) {
            uint8_t *copy_from = oldpic->data[0];
            unsigned int offset, j;
            uint8_t cols[4], grps[4];
            C93BlockType block_type;

            if (!bt)
                bt = *buf++;

            block_type= bt & 0x0F;
            switch (block_type) {
            case C93_8X8_FROM_PREV:
                offset = bytestream_get_le16(&buf);
                if (copy_block(avctx, out, copy_from, offset, 8, stride))
                    return -1;
                break;

            case C93_4X4_FROM_CURR:
                copy_from = newpic->data[0];
            case C93_4X4_FROM_PREV:
                for (j = 0; j < 8; j += 4) {
                    for (i = 0; i < 8; i += 4) {
                        offset = bytestream_get_le16(&buf);
                        if (copy_block(avctx, &out[j*stride+i],
                                           copy_from, offset, 4, stride))
                            return -1;
                    }
                }
                break;

            case C93_8X8_2COLOR:
                bytestream_get_buffer(&buf, cols, 2);
                for (i = 0; i < 8; i++) {
                    draw_n_color(out + i*stride, stride, 8, 1, 1, cols,
                                     NULL, *buf++);
                }

                break;

            case C93_4X4_2COLOR:
            case C93_4X4_4COLOR:
            case C93_4X4_4COLOR_GRP:
                for (j = 0; j < 8; j += 4) {
                    for (i = 0; i < 8; i += 4) {
                        if (block_type == C93_4X4_2COLOR) {
                            bytestream_get_buffer(&buf, cols, 2);
                            draw_n_color(out + i + j*stride, stride, 4, 4,
                                    1, cols, NULL, bytestream_get_le16(&buf));
                        } else if (block_type == C93_4X4_4COLOR) {
                            bytestream_get_buffer(&buf, cols, 4);
                            draw_n_color(out + i + j*stride, stride, 4, 4,
                                    2, cols, NULL, bytestream_get_le32(&buf));
                        } else {
                            bytestream_get_buffer(&buf, grps, 4);
                            draw_n_color(out + i + j*stride, stride, 4, 4,
                                    1, cols, grps, bytestream_get_le16(&buf));
                        }
                    }
                }
                break;

            case C93_NOOP:
                break;

            case C93_8X8_INTRA:
                for (j = 0; j < 8; j++)
                    bytestream_get_buffer(&buf, out + j*stride, 8);
                break;

            default:
                av_log(avctx, AV_LOG_ERROR, "unexpected type %x at %dx%d\n",
                       block_type, x, y);
                return -1;
            }
            bt >>= 4;
            out += 8;
        }
    }

    *picture = *newpic;
    *data_size = sizeof(AVFrame);

    return buf_size;
}
Example #16
0
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size,
                        AVPacket *avpkt) {
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    AVSubtitle *sub = data;
    const uint8_t *buf_end = buf + buf_size;
    uint8_t *bitmap;
    int w, h, x, y, i;
    int64_t packet_time = 0;
    GetBitContext gb;
    int has_alpha = avctx->codec_tag == MKTAG('D','X','S','A');

    // check that at least header fits
    if (buf_size < 27 + 7 * 2 + 4 * 3) {
        av_log(avctx, AV_LOG_ERROR, "coded frame too small\n");
        return -1;
    }

    // read start and end time
    if (buf[0] != '[' || buf[13] != '-' || buf[26] != ']') {
        av_log(avctx, AV_LOG_ERROR, "invalid time code\n");
        return -1;
    }
    if (avpkt->pts != AV_NOPTS_VALUE)
        packet_time = av_rescale_q(avpkt->pts, AV_TIME_BASE_Q, (AVRational){1, 1000});
    sub->start_display_time = parse_timecode(buf +  1, packet_time);
    sub->end_display_time   = parse_timecode(buf + 14, packet_time);
    buf += 27;

    // read header
    w = bytestream_get_le16(&buf);
    h = bytestream_get_le16(&buf);
    if (av_image_check_size(w, h, 0, avctx) < 0)
        return -1;
    x = bytestream_get_le16(&buf);
    y = bytestream_get_le16(&buf);
    // skip bottom right position, it gives no new information
    bytestream_get_le16(&buf);
    bytestream_get_le16(&buf);
    // The following value is supposed to indicate the start offset
    // (relative to the palette) of the data for the second field,
    // however there are files where it has a bogus value and thus
    // we just ignore it
    bytestream_get_le16(&buf);

    // allocate sub and set values
    sub->rects =  av_mallocz(sizeof(*sub->rects));
    sub->rects[0] = av_mallocz(sizeof(*sub->rects[0]));
    sub->num_rects = 1;
    sub->rects[0]->x = x; sub->rects[0]->y = y;
    sub->rects[0]->w = w; sub->rects[0]->h = h;
    sub->rects[0]->type = SUBTITLE_BITMAP;
    sub->rects[0]->pict.linesize[0] = w;
    sub->rects[0]->pict.data[0] = av_malloc(w * h);
    sub->rects[0]->nb_colors = 4;
    sub->rects[0]->pict.data[1] = av_mallocz(AVPALETTE_SIZE);

    // read palette
    for (i = 0; i < sub->rects[0]->nb_colors; i++)
        ((uint32_t*)sub->rects[0]->pict.data[1])[i] = bytestream_get_be24(&buf);
    // make all except background (first entry) non-transparent
    for (i = 0; i < sub->rects[0]->nb_colors; i++)
        ((uint32_t*)sub->rects[0]->pict.data[1])[i] |= (has_alpha ? *buf++ : (i ? 0xff : 0)) << 24;

    // process RLE-compressed data
    init_get_bits(&gb, buf, (buf_end - buf) * 8);
    bitmap = sub->rects[0]->pict.data[0];
    for (y = 0; y < h; y++) {
        // interlaced: do odd lines
        if (y == (h + 1) / 2) bitmap = sub->rects[0]->pict.data[0] + w;
        for (x = 0; x < w; ) {
            int log2 = ff_log2_tab[show_bits(&gb, 8)];
            int run = get_bits(&gb, 14 - 4 * (log2 >> 1));
            int color = get_bits(&gb, 2);
            run = FFMIN(run, w - x);
            // run length 0 means till end of row
            if (!run) run = w - x;
            memset(bitmap, color, run);
            bitmap += run;
            x += run;
        }
        // interlaced, skip every second line
        bitmap += w;
        align_get_bits(&gb);
    }
    *data_size = 1;
    return buf_size;
}
Example #17
0
static int pcm_decode_frame(AVCodecContext *avctx,
                            void *data, int *data_size,
                            AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    PCMDecode *s = avctx->priv_data;
    int sample_size, c, n;
    short *samples;
    const uint8_t *src, *src8, *src2[MAX_CHANNELS];
    uint8_t *dstu8;
    int16_t *dst_int16_t;
    int32_t *dst_int32_t;
    int64_t *dst_int64_t;
    uint16_t *dst_uint16_t;
    uint32_t *dst_uint32_t;

    samples = data;
    src = buf;

    if (avctx->sample_fmt!=avctx->codec->sample_fmts[0]) {
        av_log(avctx, AV_LOG_ERROR, "invalid sample_fmt\n");
        return -1;
    }

    if(avctx->channels <= 0 || avctx->channels > MAX_CHANNELS){
        av_log(avctx, AV_LOG_ERROR, "PCM channels out of bounds\n");
        return -1;
    }

    sample_size = av_get_bits_per_sample(avctx->codec_id)/8;

    /* av_get_bits_per_sample returns 0 for CODEC_ID_PCM_DVD */
    if (CODEC_ID_PCM_DVD == avctx->codec_id)
        /* 2 samples are interleaved per block in PCM_DVD */
        sample_size = avctx->bits_per_coded_sample * 2 / 8;

    n = avctx->channels * sample_size;

    if(n && buf_size % n){
        if (buf_size < n) {
            av_log(avctx, AV_LOG_ERROR, "invalid PCM packet\n");
            return -1;
        }else
            buf_size -= buf_size % n;
    }

    buf_size= FFMIN(buf_size, *data_size/2);
    *data_size=0;

    n = buf_size/sample_size;

    switch(avctx->codec->id) {
    case CODEC_ID_PCM_U32LE:
        DECODE(uint32_t, le32, src, samples, n, 0, 0x80000000)
        break;
    case CODEC_ID_PCM_U32BE:
        DECODE(uint32_t, be32, src, samples, n, 0, 0x80000000)
        break;
    case CODEC_ID_PCM_S24LE:
        DECODE(int32_t, le24, src, samples, n, 8, 0)
        break;
    case CODEC_ID_PCM_S24BE:
        DECODE(int32_t, be24, src, samples, n, 8, 0)
        break;
    case CODEC_ID_PCM_U24LE:
        DECODE(uint32_t, le24, src, samples, n, 8, 0x800000)
        break;
    case CODEC_ID_PCM_U24BE:
        DECODE(uint32_t, be24, src, samples, n, 8, 0x800000)
        break;
    case CODEC_ID_PCM_S24DAUD:
        for(;n>0;n--) {
          uint32_t v = bytestream_get_be24(&src);
          v >>= 4; // sync flags are here
          *samples++ = av_reverse[(v >> 8) & 0xff] +
                       (av_reverse[v & 0xff] << 8);
        }
        break;
    case CODEC_ID_PCM_S16LE_PLANAR:
        n /= avctx->channels;
        for(c=0;c<avctx->channels;c++)
            src2[c] = &src[c*n*2];
        for(;n>0;n--)
            for(c=0;c<avctx->channels;c++)
                *samples++ = bytestream_get_le16(&src2[c]);
        src = src2[avctx->channels-1];
        break;
    case CODEC_ID_PCM_U16LE:
        DECODE(uint16_t, le16, src, samples, n, 0, 0x8000)
        break;
    case CODEC_ID_PCM_U16BE:
        DECODE(uint16_t, be16, src, samples, n, 0, 0x8000)
        break;
    case CODEC_ID_PCM_S8:
        dstu8= (uint8_t*)samples;
        for(;n>0;n--) {
            *dstu8++ = *src++ + 128;
        }
        samples= (short*)dstu8;
        break;
#if HAVE_BIGENDIAN
    case CODEC_ID_PCM_F64LE:
        DECODE(int64_t, le64, src, samples, n, 0, 0)
        break;
    case CODEC_ID_PCM_S32LE:
    case CODEC_ID_PCM_F32LE:
        DECODE(int32_t, le32, src, samples, n, 0, 0)
        break;
    case CODEC_ID_PCM_S16LE:
        DECODE(int16_t, le16, src, samples, n, 0, 0)
        break;
    case CODEC_ID_PCM_F64BE:
    case CODEC_ID_PCM_F32BE:
    case CODEC_ID_PCM_S32BE:
    case CODEC_ID_PCM_S16BE:
#else
    case CODEC_ID_PCM_F64BE:
        DECODE(int64_t, be64, src, samples, n, 0, 0)
        break;
    case CODEC_ID_PCM_F32BE:
    case CODEC_ID_PCM_S32BE:
        DECODE(int32_t, be32, src, samples, n, 0, 0)
        break;
    case CODEC_ID_PCM_S16BE:
        DECODE(int16_t, be16, src, samples, n, 0, 0)
        break;
    case CODEC_ID_PCM_F64LE:
    case CODEC_ID_PCM_F32LE:
    case CODEC_ID_PCM_S32LE:
    case CODEC_ID_PCM_S16LE:
#endif /* HAVE_BIGENDIAN */
    case CODEC_ID_PCM_U8:
        memcpy(samples, src, n*sample_size);
        src += n*sample_size;
        samples = (short*)((uint8_t*)data + n*sample_size);
        break;
    case CODEC_ID_PCM_ZORK:
        for(;n>0;n--) {
            int x= *src++;
            if(x&128) x-= 128;
            else      x = -x;
            *samples++ = x << 8;
        }
        break;
    case CODEC_ID_PCM_ALAW:
    case CODEC_ID_PCM_MULAW:
        for(;n>0;n--) {
            *samples++ = s->table[*src++];
        }
        break;
    case CODEC_ID_PCM_DVD:
        dst_int32_t = data;
        n /= avctx->channels;
        switch (avctx->bits_per_coded_sample) {
        case 16:
            while (n--) {
                c = avctx->channels;
                while (c--)
                    *dst_int32_t++ = bytestream_get_be16(&src) << 16;
            }
            break;
        case 20:
            while (n--) {
                c = avctx->channels;
                src8 = src + 4*c;
                while (c--) {
                    *dst_int32_t++ = (bytestream_get_be16(&src) << 16) + ((*src8   &0xf0) << 8);
                    *dst_int32_t++ = (bytestream_get_be16(&src) << 16) + ((*src8++ &0x0f) << 12);
                }
                src = src8;
            }
            break;
        case 24:
            while (n--) {
                c = avctx->channels;
                src8 = src + 4*c;
                while (c--) {
                    *dst_int32_t++ = (bytestream_get_be16(&src) << 16) + ((*src8++) << 8);
                    *dst_int32_t++ = (bytestream_get_be16(&src) << 16) + ((*src8++) << 8);
                }
                src = src8;
            }
            break;
        default:
            av_log(avctx, AV_LOG_ERROR,
                   "PCM DVD unsupported sample depth %i\n",
                   avctx->bits_per_coded_sample);
            return -1;
            break;
        }
        samples = (short *) dst_int32_t;
        break;
    default:
        return -1;
    }
    *data_size = (uint8_t *)samples - (uint8_t *)data;
    return src - buf;
}
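
The DECODE() macro used in the sample-format cases above is not part of this excerpt. Conceptually each case reads n raw samples of the given width and endianness and converts them to the native output format using the shift and offset arguments; the sketch below spells out one concrete case, CODEC_ID_PCM_U16LE (offset 0x8000, shift 0), under those assumptions:

#include <stdint.h>

/* Read n little-endian unsigned 16-bit samples from *src and store them
 * as signed 16-bit samples in *dst, re-biasing by 0x8000. Both pointers
 * are advanced, mirroring how the decoder above walks src and samples. */
static void decode_pcm_u16le_sketch(const uint8_t **src, int16_t **dst, int n)
{
    for (; n > 0; n--) {
        unsigned v = (*src)[0] | ((*src)[1] << 8);

        *src += 2;
        *(*dst)++ = (int)v - 0x8000;
    }
}
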