Example #1
static unsigned tget_long(GetByteContext *gb, int le)
{
    unsigned v = le ? bytestream2_get_le32u(gb) : bytestream2_get_be32u(gb);
    return v;
}
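
The helper above just selects between the little-endian and big-endian unchecked 32-bit getters of the bytestream2 API. The following stand-alone sketch (an illustration, not part of the original source; it assumes it is built inside the FFmpeg tree so that libavcodec/bytestream.h is available) reads the same four bytes with both byte orders:

#include <stdio.h>
#include "libavcodec/bytestream.h"

static unsigned tget_long(GetByteContext *gb, int le)
{
    return le ? bytestream2_get_le32u(gb) : bytestream2_get_be32u(gb);
}

int main(void)
{
    static const uint8_t buf[4] = { 0x01, 0x02, 0x03, 0x04 };
    GetByteContext gb;

    bytestream2_init(&gb, buf, sizeof(buf));
    printf("LE: 0x%08x\n", tget_long(&gb, 1)); /* prints 0x04030201 */

    bytestream2_init(&gb, buf, sizeof(buf));
    printf("BE: 0x%08x\n", tget_long(&gb, 0)); /* prints 0x01020304 */
    return 0;
}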
Example #2
/** get sizes and offsets of image, tiles; number of components */
static int get_siz(J2kDecoderContext *s)
{
    int i, ret;

    if (bytestream2_get_bytes_left(&s->g) < 36)
        return AVERROR(EINVAL);

                        bytestream2_get_be16u(&s->g); // Rsiz (skipped)
             s->width = bytestream2_get_be32u(&s->g); // width
            s->height = bytestream2_get_be32u(&s->g); // height
    s->image_offset_x = bytestream2_get_be32u(&s->g); // X0Siz
    s->image_offset_y = bytestream2_get_be32u(&s->g); // Y0Siz

        s->tile_width = bytestream2_get_be32u(&s->g); // XTSiz
       s->tile_height = bytestream2_get_be32u(&s->g); // YTSiz
     s->tile_offset_x = bytestream2_get_be32u(&s->g); // XT0Siz
     s->tile_offset_y = bytestream2_get_be32u(&s->g); // YT0Siz
       s->ncomponents = bytestream2_get_be16u(&s->g); // CSiz

    if (s->tile_width <= 0 || s->tile_height <= 0)
        return AVERROR(EINVAL);

    if (bytestream2_get_bytes_left(&s->g) < 3 * s->ncomponents)
        return AVERROR(EINVAL);

    for (i = 0; i < s->ncomponents; i++) { // Ssiz_i XRsiz_i, YRsiz_i
        uint8_t x = bytestream2_get_byteu(&s->g);
        s->cbps[i] = (x & 0x7f) + 1;
        s->precision = FFMAX(s->cbps[i], s->precision);
        s->sgnd[i] = !!(x & 0x80);
        s->cdx[i] = bytestream2_get_byteu(&s->g);
        s->cdy[i] = bytestream2_get_byteu(&s->g);
    }

    s->numXtiles = ff_j2k_ceildiv(s->width - s->tile_offset_x, s->tile_width);
    s->numYtiles = ff_j2k_ceildiv(s->height - s->tile_offset_y, s->tile_height);

    if (s->numXtiles * (uint64_t)s->numYtiles > INT_MAX / sizeof(J2kTile))
        return AVERROR(EINVAL);

    s->tile = av_mallocz(s->numXtiles * s->numYtiles * sizeof(J2kTile));
    if (!s->tile)
        return AVERROR(ENOMEM);

    for (i = 0; i < s->numXtiles * s->numYtiles; i++) {
        J2kTile *tile = s->tile + i;

        tile->comp = av_mallocz(s->ncomponents * sizeof(J2kComponent));
        if (!tile->comp)
            return AVERROR(ENOMEM);
    }

    s->avctx->width  = s->width  - s->image_offset_x;
    s->avctx->height = s->height - s->image_offset_y;

    switch (s->ncomponents) {
    case 1:
        if (s->precision > 8) {
            s->avctx->pix_fmt = PIX_FMT_GRAY16;
        } else {
            s->avctx->pix_fmt = PIX_FMT_GRAY8;
        }
        break;
    case 3:
        if (s->precision > 8) {
            s->avctx->pix_fmt = PIX_FMT_RGB48;
        } else {
            s->avctx->pix_fmt = PIX_FMT_RGB24;
        }
        break;
    case 4:
        s->avctx->pix_fmt = PIX_FMT_RGBA;
        break;
    }

    if (s->picture.data[0])
        s->avctx->release_buffer(s->avctx, &s->picture);

    if ((ret = s->avctx->get_buffer(s->avctx, &s->picture)) < 0)
        return ret;

    s->picture.pict_type = AV_PICTURE_TYPE_I;
    s->picture.key_frame = 1;

    return 0;
}
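
get_siz() shows the usual pairing in FFmpeg code: one bytestream2_get_bytes_left() bounds check up front, then the unchecked *_u getters for the whole fixed-size record that the check covers. Here is a minimal, stand-alone sketch of that pattern (the DemoSize struct and demo_parse_size() name are hypothetical, not part of the J2K decoder):

#include "libavcodec/bytestream.h"
#include "libavutil/error.h"

typedef struct DemoSize {
    uint32_t width, height;
    uint16_t ncomponents;
} DemoSize;

static int demo_parse_size(GetByteContext *gb, DemoSize *sz)
{
    if (bytestream2_get_bytes_left(gb) < 10) /* 4 + 4 + 2 bytes needed */
        return AVERROR_INVALIDDATA;

    sz->width       = bytestream2_get_be32u(gb);
    sz->height      = bytestream2_get_be32u(gb);
    sz->ncomponents = bytestream2_get_be16u(gb);

    if (!sz->width || !sz->height || !sz->ncomponents)
        return AVERROR_INVALIDDATA;

    return 0;
}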
Example #3
static int xwd_decode_frame(AVCodecContext *avctx, void *data,
                            int *got_frame, AVPacket *avpkt)
{
    AVFrame *p = data;
    const uint8_t *buf = avpkt->data;
    int i, ret, buf_size = avpkt->size;
    uint32_t version, header_size, vclass, ncolors;
    uint32_t xoffset, be, bpp, lsize, rsize;
    uint32_t pixformat, pixdepth, bunit, bitorder, bpad;
    uint32_t rgb[3];
    uint8_t *ptr;
    GetByteContext gb;

    if (buf_size < XWD_HEADER_SIZE)
        return AVERROR_INVALIDDATA;

    bytestream2_init(&gb, buf, buf_size);
    header_size = bytestream2_get_be32u(&gb);

    version = bytestream2_get_be32u(&gb);
    if (version != XWD_VERSION) {
        av_log(avctx, AV_LOG_ERROR, "unsupported version\n");
        return AVERROR_INVALIDDATA;
    }

    if (buf_size < header_size || header_size < XWD_HEADER_SIZE) {
        av_log(avctx, AV_LOG_ERROR, "invalid header size\n");
        return AVERROR_INVALIDDATA;
    }

    pixformat     = bytestream2_get_be32u(&gb);
    pixdepth      = bytestream2_get_be32u(&gb);
    avctx->width  = bytestream2_get_be32u(&gb);
    avctx->height = bytestream2_get_be32u(&gb);
    xoffset       = bytestream2_get_be32u(&gb);
    be            = bytestream2_get_be32u(&gb);
    bunit         = bytestream2_get_be32u(&gb);
    bitorder      = bytestream2_get_be32u(&gb);
    bpad          = bytestream2_get_be32u(&gb);
    bpp           = bytestream2_get_be32u(&gb);
    lsize         = bytestream2_get_be32u(&gb);
    vclass        = bytestream2_get_be32u(&gb);
    rgb[0]        = bytestream2_get_be32u(&gb);
    rgb[1]        = bytestream2_get_be32u(&gb);
    rgb[2]        = bytestream2_get_be32u(&gb);
    bytestream2_skipu(&gb, 8);
    ncolors       = bytestream2_get_be32u(&gb);
    bytestream2_skipu(&gb, header_size - (XWD_HEADER_SIZE - 20));

    av_log(avctx, AV_LOG_DEBUG,
           "pixformat %"PRIu32", pixdepth %"PRIu32", bunit %"PRIu32", bitorder %"PRIu32", bpad %"PRIu32"\n",
           pixformat, pixdepth, bunit, bitorder, bpad);
    av_log(avctx, AV_LOG_DEBUG,
           "vclass %"PRIu32", ncolors %"PRIu32", bpp %"PRIu32", be %"PRIu32", lsize %"PRIu32", xoffset %"PRIu32"\n",
           vclass, ncolors, bpp, be, lsize, xoffset);
    av_log(avctx, AV_LOG_DEBUG,
           "red %0"PRIx32", green %0"PRIx32", blue %0"PRIx32"\n",
           rgb[0], rgb[1], rgb[2]);

    if (pixformat > XWD_Z_PIXMAP) {
        av_log(avctx, AV_LOG_ERROR, "invalid pixmap format\n");
        return AVERROR_INVALIDDATA;
    }

    if (pixdepth == 0 || pixdepth > 32) {
        av_log(avctx, AV_LOG_ERROR, "invalid pixmap depth\n");
        return AVERROR_INVALIDDATA;
    }

    if (xoffset) {
        avpriv_request_sample(avctx, "xoffset %"PRIu32"", xoffset);
        return AVERROR_PATCHWELCOME;
    }

    if (be > 1) {
        av_log(avctx, AV_LOG_ERROR, "invalid byte order\n");
        return AVERROR_INVALIDDATA;
    }

    if (bitorder > 1) {
        av_log(avctx, AV_LOG_ERROR, "invalid bitmap bit order\n");
        return AVERROR_INVALIDDATA;
    }

    if (bunit != 8 && bunit != 16 && bunit != 32) {
        av_log(avctx, AV_LOG_ERROR, "invalid bitmap unit\n");
        return AVERROR_INVALIDDATA;
    }

    if (bpad != 8 && bpad != 16 && bpad != 32) {
        av_log(avctx, AV_LOG_ERROR, "invalid bitmap scan-line pad\n");
        return AVERROR_INVALIDDATA;
    }

    if (bpp == 0 || bpp > 32) {
        av_log(avctx, AV_LOG_ERROR, "invalid bits per pixel\n");
        return AVERROR_INVALIDDATA;
    }

    if (ncolors > 256) {
        av_log(avctx, AV_LOG_ERROR, "invalid number of entries in colormap\n");
        return AVERROR_INVALIDDATA;
    }

    if ((ret = av_image_check_size(avctx->width, avctx->height, 0, NULL)) < 0)
        return ret;

    rsize = FFALIGN(avctx->width * bpp, bpad) / 8;
    if (lsize < rsize) {
        av_log(avctx, AV_LOG_ERROR, "invalid bytes per scan-line\n");
        return AVERROR_INVALIDDATA;
    }

    if (bytestream2_get_bytes_left(&gb) < ncolors * XWD_CMAP_SIZE + (uint64_t)avctx->height * lsize) {
        av_log(avctx, AV_LOG_ERROR, "input buffer too small\n");
        return AVERROR_INVALIDDATA;
    }

    if (pixformat != XWD_Z_PIXMAP) {
        avpriv_report_missing_feature(avctx, "Pixmap format %"PRIu32, pixformat);
        return AVERROR_PATCHWELCOME;
    }

    avctx->pix_fmt = AV_PIX_FMT_NONE;
    switch (vclass) {
    case XWD_STATIC_GRAY:
    case XWD_GRAY_SCALE:
        if (bpp != 1 && bpp != 8)
            return AVERROR_INVALIDDATA;
        if (bpp == 1 && pixdepth == 1) {
            avctx->pix_fmt = AV_PIX_FMT_MONOWHITE;
        } else if (bpp == 8 && pixdepth == 8) {
            avctx->pix_fmt = AV_PIX_FMT_GRAY8;
        }
        break;
    case XWD_STATIC_COLOR:
    case XWD_PSEUDO_COLOR:
        if (bpp == 8)
            avctx->pix_fmt = AV_PIX_FMT_PAL8;
        break;
    case XWD_TRUE_COLOR:
    case XWD_DIRECT_COLOR:
        if (bpp != 16 && bpp != 24 && bpp != 32)
            return AVERROR_INVALIDDATA;
        if (bpp == 16 && pixdepth == 15) {
            if (rgb[0] == 0x7C00 && rgb[1] == 0x3E0 && rgb[2] == 0x1F)
                avctx->pix_fmt = be ? AV_PIX_FMT_RGB555BE : AV_PIX_FMT_RGB555LE;
            else if (rgb[0] == 0x1F && rgb[1] == 0x3E0 && rgb[2] == 0x7C00)
                avctx->pix_fmt = be ? AV_PIX_FMT_BGR555BE : AV_PIX_FMT_BGR555LE;
        } else if (bpp == 16 && pixdepth == 16) {
            if (rgb[0] == 0xF800 && rgb[1] == 0x7E0 && rgb[2] == 0x1F)
                avctx->pix_fmt = be ? AV_PIX_FMT_RGB565BE : AV_PIX_FMT_RGB565LE;
            else if (rgb[0] == 0x1F && rgb[1] == 0x7E0 && rgb[2] == 0xF800)
                avctx->pix_fmt = be ? AV_PIX_FMT_BGR565BE : AV_PIX_FMT_BGR565LE;
        } else if (bpp == 24) {
            if (rgb[0] == 0xFF0000 && rgb[1] == 0xFF00 && rgb[2] == 0xFF)
                avctx->pix_fmt = be ? AV_PIX_FMT_RGB24 : AV_PIX_FMT_BGR24;
            else if (rgb[0] == 0xFF && rgb[1] == 0xFF00 && rgb[2] == 0xFF0000)
                avctx->pix_fmt = be ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_RGB24;
        } else if (bpp == 32) {
            if (rgb[0] == 0xFF0000 && rgb[1] == 0xFF00 && rgb[2] == 0xFF)
                avctx->pix_fmt = be ? AV_PIX_FMT_ARGB : AV_PIX_FMT_BGRA;
            else if (rgb[0] == 0xFF && rgb[1] == 0xFF00 && rgb[2] == 0xFF0000)
                avctx->pix_fmt = be ? AV_PIX_FMT_ABGR : AV_PIX_FMT_RGBA;
        }
        bytestream2_skipu(&gb, ncolors * XWD_CMAP_SIZE);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "invalid visual class\n");
        return AVERROR_INVALIDDATA;
    }

    if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
        avpriv_request_sample(avctx,
                              "Unknown file: bpp %"PRIu32", pixdepth %"PRIu32", vclass %"PRIu32"",
                              bpp, pixdepth, vclass);
        return AVERROR_PATCHWELCOME;
    }

    if ((ret = ff_get_buffer(avctx, p, 0)) < 0)
        return ret;

    p->key_frame = 1;
    p->pict_type = AV_PICTURE_TYPE_I;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        uint32_t *dst = (uint32_t *)p->data[1];
        uint8_t red, green, blue;

        for (i = 0; i < ncolors; i++) {
            bytestream2_skipu(&gb, 4); // skip colormap entry number
            red    = bytestream2_get_byteu(&gb);
            bytestream2_skipu(&gb, 1);
            green  = bytestream2_get_byteu(&gb);
            bytestream2_skipu(&gb, 1);
            blue   = bytestream2_get_byteu(&gb);
            bytestream2_skipu(&gb, 3); // skip bitmask flag and padding

            dst[i] = 0xFFU << 24 | red << 16 | green << 8 | blue;
        }
    }

    ptr = p->data[0];
    for (i = 0; i < avctx->height; i++) {
        bytestream2_get_bufferu(&gb, ptr, rsize);
        bytestream2_skipu(&gb, lsize - rsize);
        ptr += p->linesize[0];
    }

    *got_frame       = 1;

    return buf_size;
}
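
The final loop of xwd_decode_frame() copies rsize payload bytes per scan line and skips the lsize - rsize pad bytes that follow them. The same row-copy idiom in isolation (a sketch with a hypothetical demo_copy_rows() name; the caller is assumed to have verified, as the decoder above does, that at least height * lsize bytes remain in the reader):

#include "libavcodec/bytestream.h"

static void demo_copy_rows(GetByteContext *gb, uint8_t *dst, int linesize,
                           int height, unsigned rsize, unsigned lsize)
{
    int i;

    for (i = 0; i < height; i++) {
        bytestream2_get_bufferu(gb, dst, rsize); /* payload bytes of this row */
        bytestream2_skipu(gb, lsize - rsize);    /* scan-line padding */
        dst += linesize;
    }
}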
Example #4
static int pix_decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
                            AVPacket *avpkt)
{
    AVFrame *frame = data;

    int ret, i;
    GetByteContext gb;

    unsigned int bytes_pp;
    unsigned int magic[4];
    unsigned int chunk_type;
    unsigned int data_len;
    unsigned int bytes_per_scanline;
    unsigned int bytes_left;
    PixHeader hdr;

    bytestream2_init(&gb, avpkt->data, avpkt->size);

    magic[0] = bytestream2_get_be32(&gb);
    magic[1] = bytestream2_get_be32(&gb);
    magic[2] = bytestream2_get_be32(&gb);
    magic[3] = bytestream2_get_be32(&gb);

    if (magic[0] != 0x12 ||
        magic[1] != 0x08 ||
        magic[2] != 0x02 ||
        magic[3] != 0x02) {
        av_log(avctx, AV_LOG_ERROR, "Not a BRender PIX file.\n");
        return AVERROR_INVALIDDATA;
    }

    chunk_type = bytestream2_get_be32(&gb);
    if (chunk_type != HEADER1_CHUNK && chunk_type != HEADER2_CHUNK) {
        av_log(avctx, AV_LOG_ERROR, "Invalid chunk type %d.\n", chunk_type);
        return AVERROR_INVALIDDATA;
    }

    ret = pix_decode_header(&hdr, &gb);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid header length.\n");
        return ret;
    }
    switch (hdr.format) {
    case 3:
        avctx->pix_fmt = AV_PIX_FMT_PAL8;
        bytes_pp = 1;
        break;
    case 4:
        avctx->pix_fmt = AV_PIX_FMT_RGB555BE;
        bytes_pp = 2;
        break;
    case 5:
        avctx->pix_fmt = AV_PIX_FMT_RGB565BE;
        bytes_pp = 2;
        break;
    case 6:
        avctx->pix_fmt = AV_PIX_FMT_RGB24;
        bytes_pp = 3;
        break;
    case 7:
        avctx->pix_fmt = AV_PIX_FMT_0RGB;
        bytes_pp = 4;
        break;
    case 8: // ARGB
        avctx->pix_fmt = AV_PIX_FMT_ARGB;
        bytes_pp = 4;
        break;
    case 18:
        avctx->pix_fmt = AV_PIX_FMT_Y400A;
        bytes_pp = 2;
        break;
    default:
        avpriv_request_sample(avctx, "Format %d", hdr.format);
        return AVERROR_PATCHWELCOME;
    }

    if ((ret = ff_set_dimensions(avctx, hdr.width, hdr.height)) < 0)
        return ret;

    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)
        return ret;

    chunk_type = bytestream2_get_be32(&gb);

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8 &&
        (chunk_type == HEADER1_CHUNK ||
         chunk_type == HEADER2_CHUNK)) {
        /* read palette data from data[1] */
        PixHeader palhdr;
        uint32_t *pal_out = (uint32_t *)frame->data[1];

        ret = pix_decode_header(&palhdr, &gb);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Invalid palette header length.\n");
            return ret;
        }
        if (palhdr.format != 7)
            avpriv_request_sample(avctx, "Palette not in RGB format");

        chunk_type = bytestream2_get_be32(&gb);
        data_len = bytestream2_get_be32(&gb);
        bytestream2_skip(&gb, 8);
        if (chunk_type != IMAGE_DATA_CHUNK || data_len != 1032 ||
            bytestream2_get_bytes_left(&gb) < 1032) {
            av_log(avctx, AV_LOG_ERROR, "Invalid palette data.\n");
            return AVERROR_INVALIDDATA;
        }
        // palette data is surrounded by 8 null bytes (both top and bottom)
        // convert 0RGB to machine endian format (ARGB32)
        for (i = 0; i < 256; ++i)
            *pal_out++ = (0xFFU << 24) | bytestream2_get_be32u(&gb);
        bytestream2_skip(&gb, 8);

        frame->palette_has_changed = 1;

        chunk_type = bytestream2_get_be32(&gb);
    } else if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        /* no palette supplied, use the default one */
        uint32_t *pal_out = (uint32_t *)frame->data[1];

        // TODO: add an AVOption to load custom palette files
        av_log(avctx, AV_LOG_WARNING,
               "Using default palette, colors might be off.\n");
        memcpy(pal_out, std_pal_table, sizeof(uint32_t) * 256);

        frame->palette_has_changed = 1;
    }

    data_len = bytestream2_get_be32(&gb);
    bytestream2_skip(&gb, 8);

    // read the image data to the buffer
    bytes_per_scanline = bytes_pp * hdr.width;
    bytes_left = bytestream2_get_bytes_left(&gb);

    if (chunk_type != IMAGE_DATA_CHUNK || data_len != bytes_left ||
        bytes_left / bytes_per_scanline < hdr.height) {
        av_log(avctx, AV_LOG_ERROR, "Invalid image data.\n");
        return AVERROR_INVALIDDATA;
    }

    av_image_copy_plane(frame->data[0], frame->linesize[0],
                        avpkt->data + bytestream2_tell(&gb),
                        bytes_per_scanline,
                        bytes_per_scanline, hdr.height);

    frame->pict_type = AV_PICTURE_TYPE_I;
    frame->key_frame = 1;
    *got_frame = 1;

    return avpkt->size;
}
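
The palette branch above converts 256 big-endian 0RGB words into opaque ARGB entries by forcing the alpha byte to 0xFF. That conversion on its own (a sketch with a hypothetical demo_read_pal8() name; the caller must guarantee that at least 1024 bytes remain, which the data_len check above ensures):

#include "libavcodec/bytestream.h"

static void demo_read_pal8(GetByteContext *gb, uint32_t pal[256])
{
    int i;

    /* each entry is a big-endian 0RGB word; force alpha to 0xFF */
    for (i = 0; i < 256; i++)
        pal[i] = 0xFFU << 24 | bytestream2_get_be32u(gb);
}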
Example #5
static int redspark_read_header(AVFormatContext *s)
{
    AVIOContext *pb = s->pb;
    RedSparkContext *redspark = s->priv_data;
    AVCodecParameters *par;
    GetByteContext gbc;
    int i, coef_off, ret = 0;
    uint32_t key, data;
    uint8_t header[HEADER_SIZE];
    AVStream *st;

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);
    par = st->codecpar;

    /* Decrypt header */
    data = avio_rb32(pb);
    key  = data ^ 0x52656453;
    data ^= key;
    AV_WB32(header, data);
    key = rol(key, 11);

    for (i = 4; i < HEADER_SIZE; i += 4) {
        key += rol(key, 3);
        data = avio_rb32(pb) ^ key;
        AV_WB32(header + i, data);
    }

    par->codec_id    = AV_CODEC_ID_ADPCM_THP;
    par->codec_type  = AVMEDIA_TYPE_AUDIO;

    bytestream2_init(&gbc, header, HEADER_SIZE);
    bytestream2_seek(&gbc, 0x3c, SEEK_SET);
    par->sample_rate = bytestream2_get_be32u(&gbc);
    if (par->sample_rate <= 0 || par->sample_rate > 96000) {
        av_log(s, AV_LOG_ERROR, "Invalid sample rate: %d\n", par->sample_rate);
        return AVERROR_INVALIDDATA;
    }

    st->duration = bytestream2_get_be32u(&gbc) * 14;
    redspark->samples_count = 0;
    bytestream2_skipu(&gbc, 10);
    par->channels = bytestream2_get_byteu(&gbc);
    if (!par->channels) {
        return AVERROR_INVALIDDATA;
    }

    coef_off = 0x54 + par->channels * 8;
    if (bytestream2_get_byteu(&gbc)) // Loop flag
        coef_off += 16;

    if (coef_off + par->channels * (32 + 14) > HEADER_SIZE) {
        return AVERROR_INVALIDDATA;
    }

    if (ff_alloc_extradata(par, 32 * par->channels)) {
        return AVERROR_INVALIDDATA;
    }

    /* Get the ADPCM table */
    bytestream2_seek(&gbc, coef_off, SEEK_SET);
    for (i = 0; i < par->channels; i++) {
        if (bytestream2_get_bufferu(&gbc, par->extradata + i * 32, 32) != 32) {
            return AVERROR_INVALIDDATA;
        }
        bytestream2_skipu(&gbc, 14);
    }

    avpriv_set_pts_info(st, 64, 1, par->sample_rate);

    return ret;
}
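
redspark_read_header() first decrypts the fixed-size header with a rolling XOR key and only then parses it through a GetByteContext, since bytestream2 can only walk a flat memory buffer. The decryption step in isolation (a sketch with hypothetical demo_ names; it assumes rol() is a 32-bit rotate-left and 0x1000 is the header size, neither of which is shown in the excerpt):

#include <stdint.h>
#include "libavutil/intreadwrite.h"

#define DEMO_HEADER_SIZE 0x1000 /* assumed value of HEADER_SIZE */

static uint32_t demo_rol(uint32_t value, uint32_t count)
{
    return (value << count) | (value >> (32 - count));
}

static void demo_decrypt_header(const uint8_t *src, uint8_t *dst)
{
    uint32_t data = AV_RB32(src);
    uint32_t key  = data ^ 0x52656453; /* "RedS" */
    int i;

    AV_WB32(dst, data ^ key);
    key = demo_rol(key, 11);

    for (i = 4; i < DEMO_HEADER_SIZE; i += 4) {
        key += demo_rol(key, 3);
        AV_WB32(dst + i, AV_RB32(src + i) ^ key);
    }
}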