Example #1
/**
 * Read a run length encoded SGI image.
 * @param out_buf output buffer
 * @param s the current image state
 * @return 0 if no error, else return error number.
 */
static int read_rle_sgi(uint8_t *out_buf, SgiState *s)
{
    uint8_t *dest_row;
    unsigned int len = s->height * s->depth * 4;
    GetByteContext g_table = s->g;
    unsigned int y, z;
    unsigned int start_offset;

    /* size of RLE offset and length tables */
    if (len * 2 > bytestream2_get_bytes_left(&s->g)) {
        return AVERROR_INVALIDDATA;
    }

    for (z = 0; z < s->depth; z++) {
        dest_row = out_buf;
        for (y = 0; y < s->height; y++) {
            dest_row -= s->linesize;
            start_offset = bytestream2_get_be32(&g_table);
            bytestream2_seek(&s->g, start_offset, SEEK_SET);
            if (expand_rle_row(s, dest_row + z, dest_row + FFABS(s->linesize),
                               s->depth) != s->width) {
                return AVERROR_INVALIDDATA;
            }
        }
    }
    return 0;
}
Example #2
/**
 * Read a run length encoded SGI image.
 * @param out_buf output buffer
 * @param s the current image state
 * @return 0 if no error, else return error code.
 */
static int read_rle_sgi(uint8_t *out_buf, SgiState *s)
{
    uint8_t *dest_row;
    unsigned int len = s->height * s->depth * 4;
    GetByteContext g_table = s->g;
    unsigned int y, z;
    unsigned int start_offset;
    int linesize, ret;

    /* size of RLE offset and length tables */
    if (len * 2 > bytestream2_get_bytes_left(&s->g)) {
        return AVERROR_INVALIDDATA;
    }

    for (z = 0; z < s->depth; z++) {
        dest_row = out_buf;
        for (y = 0; y < s->height; y++) {
            linesize = s->width * s->depth * s->bytes_per_channel;
            dest_row -= s->linesize;
            start_offset = bytestream2_get_be32(&g_table);
            bytestream2_seek(&s->g, start_offset, SEEK_SET);
            if (s->bytes_per_channel == 1)
                ret = expand_rle_row8(s, dest_row + z, linesize, s->depth);
            else
                ret = expand_rle_row16(s, (uint16_t *)dest_row + z, linesize, s->depth);
            if (ret != s->width)
                return AVERROR_INVALIDDATA;
        }
    }
    return 0;
}
Example #3
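/*
 * Decode a single EXIF/TIFF tag from the byte stream: sub-IFD tags are
 * expanded recursively, all other tags are converted into a metadata entry
 * named after the tag (or "0xNNNN" for tags without a known name).
 * The stream position is restored before returning.
 */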
static int exif_decode_tag(AVCodecContext *avctx, GetByteContext *gbytes, int le,
                           int depth, AVDictionary **metadata)
{
    int ret, cur_pos;
    unsigned id, count;
    enum TiffTypes type;

    if (depth > 2) {
        return 0;
    }

    ff_tread_tag(gbytes, le, &id, &type, &count, &cur_pos);

    if (!bytestream2_tell(gbytes)) {
        bytestream2_seek(gbytes, cur_pos, SEEK_SET);
        return 0;
    }

    // read the count values and add them as metadata,
    // or descend into a nested IFD
    ret = ff_tis_ifd(id);
    if (ret) {
        ret = avpriv_exif_decode_ifd(avctx, gbytes, le, depth + 1, metadata);
    } else {
        const char *name = exif_get_tag_name(id);
        char *use_name   = (char*) name;

        if (!use_name) {
            use_name = av_malloc(7);
            if (!use_name) {
                return AVERROR(ENOMEM);
            }
            snprintf(use_name, 7, "0x%04X", id);
        }

        ret = exif_add_metadata(avctx, count, type, use_name, NULL,
                                gbytes, le, metadata);

        if (!name) {
            av_freep(&use_name);
        }
    }

    bytestream2_seek(gbytes, cur_pos, SEEK_SET);

    return ret;
}
Example #4
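/*
 * Parse one TIFF IFD entry and update the decoder state accordingly:
 * image geometry, bits per sample, compression, strip layout, palette,
 * GeoTIFF keys and textual metadata. Unknown tags are ignored unless
 * AV_EF_EXPLODE error recognition is enabled.
 */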
static int tiff_decode_tag(TiffContext *s, AVFrame *frame)
{
    unsigned tag, type, count, off, value = 0, value2 = 0;
    int i, start;
    int pos;
    int ret;
    double *dp;

    ret = ff_tread_tag(&s->gb, s->le, &tag, &type, &count, &start);
    if (ret < 0) {
        goto end;
    }

    off = bytestream2_tell(&s->gb);
    if (count == 1) {
        switch (type) {
        case TIFF_BYTE:
        case TIFF_SHORT:
        case TIFF_LONG:
            value = ff_tget(&s->gb, type, s->le);
            break;
        case TIFF_RATIONAL:
            value  = ff_tget(&s->gb, TIFF_LONG, s->le);
            value2 = ff_tget(&s->gb, TIFF_LONG, s->le);
            break;
        case TIFF_STRING:
            if (count <= 4) {
                break;
            }
        default:
            value = UINT_MAX;
        }
    }

    switch (tag) {
    case TIFF_WIDTH:
        s->width = value;
        break;
    case TIFF_HEIGHT:
        s->height = value;
        break;
    case TIFF_BPP:
        s->bppcount = count;
        if (count > 4) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "This format is not supported (bpp=%d, %d components)\n",
                   s->bpp, count);
            return AVERROR_INVALIDDATA;
        }
        if (count == 1)
            s->bpp = value;
        else {
            switch (type) {
            case TIFF_BYTE:
            case TIFF_SHORT:
            case TIFF_LONG:
                s->bpp = 0;
                if (bytestream2_get_bytes_left(&s->gb) < type_sizes[type] * count)
                    return AVERROR_INVALIDDATA;
                for (i = 0; i < count; i++)
                    s->bpp += ff_tget(&s->gb, type, s->le);
                break;
            default:
                s->bpp = -1;
            }
        }
        break;
    case TIFF_SAMPLES_PER_PIXEL:
        if (count != 1) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "Samples per pixel requires a single value, many provided\n");
            return AVERROR_INVALIDDATA;
        }
        if (value > 4U) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "Samples per pixel %d is too large\n", value);
            return AVERROR_INVALIDDATA;
        }
        if (s->bppcount == 1)
            s->bpp *= value;
        s->bppcount = value;
        break;
    case TIFF_COMPR:
        s->compr     = value;
        s->predictor = 0;
        switch (s->compr) {
        case TIFF_RAW:
        case TIFF_PACKBITS:
        case TIFF_LZW:
        case TIFF_CCITT_RLE:
            break;
        case TIFF_G3:
        case TIFF_G4:
            s->fax_opts = 0;
            break;
        case TIFF_DEFLATE:
        case TIFF_ADOBE_DEFLATE:
#if CONFIG_ZLIB
            break;
#else
            av_log(s->avctx, AV_LOG_ERROR, "Deflate: ZLib not compiled in\n");
            return AVERROR(ENOSYS);
#endif
        case TIFF_JPEG:
        case TIFF_NEWJPEG:
            avpriv_report_missing_feature(s->avctx, "JPEG compression");
            return AVERROR_PATCHWELCOME;
        default:
            av_log(s->avctx, AV_LOG_ERROR, "Unknown compression method %i\n",
                   s->compr);
            return AVERROR_INVALIDDATA;
        }
        break;
    case TIFF_ROWSPERSTRIP:
        if (!value || (type == TIFF_LONG && value == UINT_MAX))
            value = s->height;
        s->rps = FFMIN(value, s->height);
        break;
    case TIFF_STRIP_OFFS:
        if (count == 1) {
            s->strippos = 0;
            s->stripoff = value;
        } else
            s->strippos = off;
        s->strips = count;
        if (s->strips == 1)
            s->rps = s->height;
        s->sot = type;
        break;
    case TIFF_STRIP_SIZE:
        if (count == 1) {
            s->stripsizesoff = 0;
            s->stripsize     = value;
            s->strips        = 1;
        } else {
            s->stripsizesoff = off;
        }
        s->strips = count;
        s->sstype = type;
        break;
    case TIFF_XRES:
    case TIFF_YRES:
        set_sar(s, tag, value, value2);
        break;
    case TIFF_TILE_BYTE_COUNTS:
    case TIFF_TILE_LENGTH:
    case TIFF_TILE_OFFSETS:
    case TIFF_TILE_WIDTH:
        av_log(s->avctx, AV_LOG_ERROR, "Tiled images are not supported\n");
        return AVERROR_PATCHWELCOME;
        break;
    case TIFF_PREDICTOR:
        s->predictor = value;
        break;
    case TIFF_PHOTOMETRIC:
        switch (value) {
        case TIFF_PHOTOMETRIC_WHITE_IS_ZERO:
        case TIFF_PHOTOMETRIC_BLACK_IS_ZERO:
        case TIFF_PHOTOMETRIC_RGB:
        case TIFF_PHOTOMETRIC_PALETTE:
        case TIFF_PHOTOMETRIC_YCBCR:
            s->photometric = value;
            break;
        case TIFF_PHOTOMETRIC_ALPHA_MASK:
        case TIFF_PHOTOMETRIC_SEPARATED:
        case TIFF_PHOTOMETRIC_CIE_LAB:
        case TIFF_PHOTOMETRIC_ICC_LAB:
        case TIFF_PHOTOMETRIC_ITU_LAB:
        case TIFF_PHOTOMETRIC_CFA:
        case TIFF_PHOTOMETRIC_LOG_L:
        case TIFF_PHOTOMETRIC_LOG_LUV:
        case TIFF_PHOTOMETRIC_LINEAR_RAW:
            avpriv_report_missing_feature(s->avctx,
                                          "PhotometricInterpretation 0x%04X",
                                          value);
            return AVERROR_PATCHWELCOME;
        default:
            av_log(s->avctx, AV_LOG_ERROR, "PhotometricInterpretation %u is "
                   "unknown\n", value);
            return AVERROR_INVALIDDATA;
        }
        break;
    case TIFF_FILL_ORDER:
        if (value < 1 || value > 2) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "Unknown FillOrder value %d, trying default one\n", value);
            value = 1;
        }
        s->fill_order = value - 1;
        break;
    case TIFF_PAL: {
        GetByteContext pal_gb[3];
        off = type_sizes[type];
        if (count / 3 > 256 ||
            bytestream2_get_bytes_left(&s->gb) < count / 3 * off * 3)
            return AVERROR_INVALIDDATA;

        pal_gb[0] = pal_gb[1] = pal_gb[2] = s->gb;
        bytestream2_skip(&pal_gb[1], count / 3 * off);
        bytestream2_skip(&pal_gb[2], count / 3 * off * 2);

        off = (type_sizes[type] - 1) << 3;
        for (i = 0; i < count / 3; i++) {
            uint32_t p = 0xFF000000;
            p |= (ff_tget(&pal_gb[0], type, s->le) >> off) << 16;
            p |= (ff_tget(&pal_gb[1], type, s->le) >> off) << 8;
            p |=  ff_tget(&pal_gb[2], type, s->le) >> off;
            s->palette[i] = p;
        }
        s->palette_is_set = 1;
        break;
    }
    case TIFF_PLANAR:
        s->planar = value == 2;
        break;
    case TIFF_YCBCR_SUBSAMPLING:
        if (count != 2) {
            av_log(s->avctx, AV_LOG_ERROR, "subsample count invalid\n");
            return AVERROR_INVALIDDATA;
        }
        for (i = 0; i < count; i++)
            s->subsampling[i] = ff_tget(&s->gb, type, s->le);
        break;
    case TIFF_T4OPTIONS:
        if (s->compr == TIFF_G3)
            s->fax_opts = value;
        break;
    case TIFF_T6OPTIONS:
        if (s->compr == TIFF_G4)
            s->fax_opts = value;
        break;
#define ADD_METADATA(count, name, sep)\
    if ((ret = add_metadata(count, type, name, sep, s, frame)) < 0) {\
        av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");\
        goto end;\
    }
    case TIFF_MODEL_PIXEL_SCALE:
        ADD_METADATA(count, "ModelPixelScaleTag", NULL);
        break;
    case TIFF_MODEL_TRANSFORMATION:
        ADD_METADATA(count, "ModelTransformationTag", NULL);
        break;
    case TIFF_MODEL_TIEPOINT:
        ADD_METADATA(count, "ModelTiepointTag", NULL);
        break;
    case TIFF_GEO_KEY_DIRECTORY:
        ADD_METADATA(1, "GeoTIFF_Version", NULL);
        ADD_METADATA(2, "GeoTIFF_Key_Revision", ".");
        s->geotag_count   = ff_tget_short(&s->gb, s->le);
        if (s->geotag_count > count / 4 - 1) {
            s->geotag_count = count / 4 - 1;
            av_log(s->avctx, AV_LOG_WARNING, "GeoTIFF key directory buffer shorter than specified\n");
        }
        if (bytestream2_get_bytes_left(&s->gb) < s->geotag_count * sizeof(int16_t) * 4) {
            s->geotag_count = 0;
            return -1;
        }
        s->geotags = av_mallocz_array(s->geotag_count, sizeof(TiffGeoTag));
        if (!s->geotags) {
            av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");
            s->geotag_count = 0;
            goto end;
        }
        for (i = 0; i < s->geotag_count; i++) {
            s->geotags[i].key    = ff_tget_short(&s->gb, s->le);
            s->geotags[i].type   = ff_tget_short(&s->gb, s->le);
            s->geotags[i].count  = ff_tget_short(&s->gb, s->le);

            if (!s->geotags[i].type)
                s->geotags[i].val  = get_geokey_val(s->geotags[i].key, ff_tget_short(&s->gb, s->le));
            else
                s->geotags[i].offset = ff_tget_short(&s->gb, s->le);
        }
        break;
    case TIFF_GEO_DOUBLE_PARAMS:
        if (count >= INT_MAX / sizeof(int64_t))
            return AVERROR_INVALIDDATA;
        if (bytestream2_get_bytes_left(&s->gb) < count * sizeof(int64_t))
            return AVERROR_INVALIDDATA;
        dp = av_malloc_array(count, sizeof(double));
        if (!dp) {
            av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");
            goto end;
        }
        for (i = 0; i < count; i++)
            dp[i] = ff_tget_double(&s->gb, s->le);
        for (i = 0; i < s->geotag_count; i++) {
            if (s->geotags[i].type == TIFF_GEO_DOUBLE_PARAMS) {
                if (s->geotags[i].count == 0
                    || s->geotags[i].offset + s->geotags[i].count > count) {
                    av_log(s->avctx, AV_LOG_WARNING, "Invalid GeoTIFF key %d\n", s->geotags[i].key);
                } else {
                    char *ap = doubles2str(&dp[s->geotags[i].offset], s->geotags[i].count, ", ");
                    if (!ap) {
                        av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");
                        av_freep(&dp);
                        return AVERROR(ENOMEM);
                    }
                    s->geotags[i].val = ap;
                }
            }
        }
        av_freep(&dp);
        break;
    case TIFF_GEO_ASCII_PARAMS:
        pos = bytestream2_tell(&s->gb);
        for (i = 0; i < s->geotag_count; i++) {
            if (s->geotags[i].type == TIFF_GEO_ASCII_PARAMS) {
                if (s->geotags[i].count == 0
                    || s->geotags[i].offset +  s->geotags[i].count > count) {
                    av_log(s->avctx, AV_LOG_WARNING, "Invalid GeoTIFF key %d\n", s->geotags[i].key);
                } else {
                    char *ap;

                    bytestream2_seek(&s->gb, pos + s->geotags[i].offset, SEEK_SET);
                    if (bytestream2_get_bytes_left(&s->gb) < s->geotags[i].count)
                        return AVERROR_INVALIDDATA;
                    ap = av_malloc(s->geotags[i].count);
                    if (!ap) {
                        av_log(s->avctx, AV_LOG_ERROR, "Error allocating temporary buffer\n");
                        return AVERROR(ENOMEM);
                    }
                    bytestream2_get_bufferu(&s->gb, ap, s->geotags[i].count);
                    ap[s->geotags[i].count - 1] = '\0'; //replace the "|" delimiter with a 0 byte
                    s->geotags[i].val = ap;
                }
            }
        }
        break;
    case TIFF_ARTIST:
        ADD_METADATA(count, "artist", NULL);
        break;
    case TIFF_COPYRIGHT:
        ADD_METADATA(count, "copyright", NULL);
        break;
    case TIFF_DATE:
        ADD_METADATA(count, "date", NULL);
        break;
    case TIFF_DOCUMENT_NAME:
        ADD_METADATA(count, "document_name", NULL);
        break;
    case TIFF_HOST_COMPUTER:
        ADD_METADATA(count, "computer", NULL);
        break;
    case TIFF_IMAGE_DESCRIPTION:
        ADD_METADATA(count, "description", NULL);
        break;
    case TIFF_MAKE:
        ADD_METADATA(count, "make", NULL);
        break;
    case TIFF_MODEL:
        ADD_METADATA(count, "model", NULL);
        break;
    case TIFF_PAGE_NAME:
        ADD_METADATA(count, "page_name", NULL);
        break;
    case TIFF_PAGE_NUMBER:
        ADD_METADATA(count, "page_number", " / ");
        break;
    case TIFF_SOFTWARE_NAME:
        ADD_METADATA(count, "software", NULL);
        break;
    default:
        if (s->avctx->err_recognition & AV_EF_EXPLODE) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "Unknown or unsupported tag %d/0X%0X\n",
                   tag, tag);
            return AVERROR_INVALIDDATA;
        }
    }
end:
    bytestream2_seek(&s->gb, start, SEEK_SET);
    return 0;
}
Example #5
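/*
 * Top-level TIFF frame decoder: parses the header and IFD entries, exports
 * GeoTIFF keys as frame metadata, allocates the output frame, then unpacks
 * the image strip by strip for each plane, applying the horizontal
 * predictor and inverting WhiteIsZero data where required.
 */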
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame, AVPacket *avpkt)
{
    TiffContext *const s = avctx->priv_data;
    AVFrame *const p = data;
    ThreadFrame frame = { .f = data };
    unsigned off;
    int le, ret, plane, planes;
    int i, j, entries, stride;
    unsigned soff, ssize;
    uint8_t *dst;
    GetByteContext stripsizes;
    GetByteContext stripdata;

    bytestream2_init(&s->gb, avpkt->data, avpkt->size);

    // parse image header
    if ((ret = ff_tdecode_header(&s->gb, &le, &off))) {
        av_log(avctx, AV_LOG_ERROR, "Invalid TIFF header\n");
        return ret;
    } else if (off >= UINT_MAX - 14 || avpkt->size < off + 14) {
        av_log(avctx, AV_LOG_ERROR, "IFD offset is greater than image size\n");
        return AVERROR_INVALIDDATA;
    }
    s->le          = le;
    // TIFF_BPP is not a required tag and defaults to 1
    s->bppcount    = s->bpp = 1;
    s->photometric = TIFF_PHOTOMETRIC_NONE;
    s->compr       = TIFF_RAW;
    s->fill_order  = 0;
    free_geotags(s);

    // Reset these offsets so we can tell if they were set this frame
    s->stripsizesoff = s->strippos = 0;
    /* parse image file directory */
    bytestream2_seek(&s->gb, off, SEEK_SET);
    entries = ff_tget_short(&s->gb, le);
    if (bytestream2_get_bytes_left(&s->gb) < entries * 12)
        return AVERROR_INVALIDDATA;
    for (i = 0; i < entries; i++) {
        if ((ret = tiff_decode_tag(s, p)) < 0)
            return ret;
    }

    for (i = 0; i<s->geotag_count; i++) {
        const char *keyname = get_geokey_name(s->geotags[i].key);
        if (!keyname) {
            av_log(avctx, AV_LOG_WARNING, "Unknown or unsupported GeoTIFF key %d\n", s->geotags[i].key);
            continue;
        }
        if (get_geokey_type(s->geotags[i].key) != s->geotags[i].type) {
            av_log(avctx, AV_LOG_WARNING, "Type of GeoTIFF key %d is wrong\n", s->geotags[i].key);
            continue;
        }
        ret = av_dict_set(avpriv_frame_get_metadatap(p), keyname, s->geotags[i].val, 0);
        if (ret<0) {
            av_log(avctx, AV_LOG_ERROR, "Writing metadata with key '%s' failed\n", keyname);
            return ret;
        }
    }

    if (!s->strippos && !s->stripoff) {
        av_log(avctx, AV_LOG_ERROR, "Image data is missing\n");
        return AVERROR_INVALIDDATA;
    }
    /* now we have the data and may start decoding */
    if ((ret = init_image(s, &frame)) < 0)
        return ret;

    if (s->strips == 1 && !s->stripsize) {
        av_log(avctx, AV_LOG_WARNING, "Image data size missing\n");
        s->stripsize = avpkt->size - s->stripoff;
    }

    if (s->stripsizesoff) {
        if (s->stripsizesoff >= (unsigned)avpkt->size)
            return AVERROR_INVALIDDATA;
        bytestream2_init(&stripsizes, avpkt->data + s->stripsizesoff,
                         avpkt->size - s->stripsizesoff);
    }
    if (s->strippos) {
        if (s->strippos >= (unsigned)avpkt->size)
            return AVERROR_INVALIDDATA;
        bytestream2_init(&stripdata, avpkt->data + s->strippos,
                         avpkt->size - s->strippos);
    }

    if (s->rps <= 0) {
        av_log(avctx, AV_LOG_ERROR, "rps %d invalid\n", s->rps);
        return AVERROR_INVALIDDATA;
    }

    planes = s->planar ? s->bppcount : 1;
    for (plane = 0; plane < planes; plane++) {
        stride = p->linesize[plane];
        dst    = p->data[plane];
    for (i = 0; i < s->height; i += s->rps) {
        if (s->stripsizesoff)
            ssize = ff_tget(&stripsizes, s->sstype, le);
        else
            ssize = s->stripsize;

        if (s->strippos)
            soff = ff_tget(&stripdata, s->sot, le);
        else
            soff = s->stripoff;

        if (soff > avpkt->size || ssize > avpkt->size - soff) {
            av_log(avctx, AV_LOG_ERROR, "Invalid strip size/offset\n");
            return AVERROR_INVALIDDATA;
        }
        if ((ret = tiff_unpack_strip(s, p, dst, stride, avpkt->data + soff, ssize, i,
                                     FFMIN(s->rps, s->height - i))) < 0) {
            if (avctx->err_recognition & AV_EF_EXPLODE)
                return ret;
            break;
        }
        dst += s->rps * stride;
    }
    if (s->predictor == 2) {
        if (s->photometric == TIFF_PHOTOMETRIC_YCBCR) {
            av_log(s->avctx, AV_LOG_ERROR, "predictor == 2 with YUV is unsupported");
            return AVERROR_PATCHWELCOME;
        }
        dst   = p->data[plane];
        soff  = s->bpp >> 3;
        if (s->planar)
            soff  = FFMAX(soff / s->bppcount, 1);
        ssize = s->width * soff;
        if (s->avctx->pix_fmt == AV_PIX_FMT_RGB48LE ||
            s->avctx->pix_fmt == AV_PIX_FMT_RGBA64LE ||
            s->avctx->pix_fmt == AV_PIX_FMT_GBRP16LE ||
            s->avctx->pix_fmt == AV_PIX_FMT_GBRAP16LE) {
            for (i = 0; i < s->height; i++) {
                for (j = soff; j < ssize; j += 2)
                    AV_WL16(dst + j, AV_RL16(dst + j) + AV_RL16(dst + j - soff));
                dst += stride;
            }
        } else if (s->avctx->pix_fmt == AV_PIX_FMT_RGB48BE ||
                   s->avctx->pix_fmt == AV_PIX_FMT_RGBA64BE ||
                   s->avctx->pix_fmt == AV_PIX_FMT_GBRP16BE ||
                   s->avctx->pix_fmt == AV_PIX_FMT_GBRAP16BE) {
            for (i = 0; i < s->height; i++) {
                for (j = soff; j < ssize; j += 2)
                    AV_WB16(dst + j, AV_RB16(dst + j) + AV_RB16(dst + j - soff));
                dst += stride;
            }
        } else {
            for (i = 0; i < s->height; i++) {
                for (j = soff; j < ssize; j++)
                    dst[j] += dst[j - soff];
                dst += stride;
            }
        }
    }

    if (s->photometric == TIFF_PHOTOMETRIC_WHITE_IS_ZERO) {
        dst = p->data[plane];
        for (i = 0; i < s->height; i++) {
            for (j = 0; j < p->linesize[plane]; j++)
                dst[j] = (s->avctx->pix_fmt == AV_PIX_FMT_PAL8 ? (1<<s->bpp) - 1 : 255) - dst[j];
            dst += stride;
        }
    }
    }

    if (s->planar && s->bppcount > 2) {
        FFSWAP(uint8_t*, p->data[0],     p->data[2]);
        FFSWAP(int,      p->linesize[0], p->linesize[2]);
        FFSWAP(uint8_t*, p->data[0],     p->data[1]);
        FFSWAP(int,      p->linesize[0], p->linesize[1]);
    }

    *got_frame = 1;

    return avpkt->size;
}
Example #6
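/*
 * Decode one PCX image: validate the 128-byte header, choose RGB24 or PAL8
 * from the plane count and bit depth, RLE-decode each scanline into a
 * temporary buffer, and finally load the palette.
 */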
static int pcx_decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
                            AVPacket *avpkt) {
    GetByteContext gb;
    AVFrame * const p = data;
    int compressed, xmin, ymin, xmax, ymax, ret;
    unsigned int w, h, bits_per_pixel, bytes_per_line, nplanes, stride, y, x,
                 bytes_per_scanline;
    uint8_t *ptr, *scanline;

    if (avpkt->size < 128)
        return AVERROR_INVALIDDATA;

    bytestream2_init(&gb, avpkt->data, avpkt->size);

    if (bytestream2_get_byteu(&gb) != 0x0a || bytestream2_get_byteu(&gb) > 5) {
        av_log(avctx, AV_LOG_ERROR, "this is not PCX encoded data\n");
        return AVERROR_INVALIDDATA;
    }

    compressed = bytestream2_get_byteu(&gb);
    bits_per_pixel = bytestream2_get_byteu(&gb);
    xmin = bytestream2_get_le16u(&gb);
    ymin = bytestream2_get_le16u(&gb);
    xmax = bytestream2_get_le16u(&gb);
    ymax = bytestream2_get_le16u(&gb);
    avctx->sample_aspect_ratio.num = bytestream2_get_le16u(&gb);
    avctx->sample_aspect_ratio.den = bytestream2_get_le16u(&gb);

    if (xmax < xmin || ymax < ymin) {
        av_log(avctx, AV_LOG_ERROR, "invalid image dimensions\n");
        return AVERROR_INVALIDDATA;
    }

    w = xmax - xmin + 1;
    h = ymax - ymin + 1;

    bytestream2_skipu(&gb, 49);
    nplanes            = bytestream2_get_byteu(&gb);
    bytes_per_line     = bytestream2_get_le16u(&gb);
    bytes_per_scanline = nplanes * bytes_per_line;

    if (bytes_per_scanline < (w * bits_per_pixel * nplanes + 7) / 8) {
        av_log(avctx, AV_LOG_ERROR, "PCX data is corrupted\n");
        return AVERROR_INVALIDDATA;
    }

    switch ((nplanes<<8) + bits_per_pixel) {
        case 0x0308:
            avctx->pix_fmt = AV_PIX_FMT_RGB24;
            break;
        case 0x0108:
        case 0x0104:
        case 0x0102:
        case 0x0101:
        case 0x0401:
        case 0x0301:
        case 0x0201:
            avctx->pix_fmt = AV_PIX_FMT_PAL8;
            break;
        default:
            av_log(avctx, AV_LOG_ERROR, "invalid PCX file\n");
            return AVERROR_INVALIDDATA;
    }

    bytestream2_skipu(&gb, 60);

    if ((ret = av_image_check_size(w, h, 0, avctx)) < 0)
        return ret;
    if (w != avctx->width || h != avctx->height)
        avcodec_set_dimensions(avctx, w, h);
    if ((ret = ff_get_buffer(avctx, p, 0)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return ret;
    }

    p->pict_type = AV_PICTURE_TYPE_I;

    ptr    = p->data[0];
    stride = p->linesize[0];

    scanline = av_malloc(bytes_per_scanline + FF_INPUT_BUFFER_PADDING_SIZE);
    if (!scanline)
        return AVERROR(ENOMEM);

    if (nplanes == 3 && bits_per_pixel == 8) {
        for (y=0; y<h; y++) {
            pcx_rle_decode(&gb, scanline, bytes_per_scanline, compressed);

            for (x=0; x<w; x++) {
                ptr[3*x  ] = scanline[x                    ];
                ptr[3*x+1] = scanline[x+ bytes_per_line    ];
                ptr[3*x+2] = scanline[x+(bytes_per_line<<1)];
            }

            ptr += stride;
        }

    } else if (nplanes == 1 && bits_per_pixel == 8) {
        int palstart = avpkt->size - 769;

        for (y=0; y<h; y++, ptr+=stride) {
            pcx_rle_decode(&gb, scanline, bytes_per_scanline, compressed);
            memcpy(ptr, scanline, w);
        }

        if (bytestream2_tell(&gb) != palstart) {
            av_log(avctx, AV_LOG_WARNING, "image data possibly corrupted\n");
            bytestream2_seek(&gb, palstart, SEEK_SET);
        }
        if (bytestream2_get_byte(&gb) != 12) {
            av_log(avctx, AV_LOG_ERROR, "expected palette after image data\n");
            ret = AVERROR_INVALIDDATA;
            goto end;
        }

    } else if (nplanes == 1) {   /* all packed formats, max. 16 colors */
        GetBitContext s;

        for (y=0; y<h; y++) {
            init_get_bits8(&s, scanline, bytes_per_scanline);

            pcx_rle_decode(&gb, scanline, bytes_per_scanline, compressed);

            for (x=0; x<w; x++)
                ptr[x] = get_bits(&s, bits_per_pixel);
            ptr += stride;
        }

    } else {    /* planar, 4, 8 or 16 colors */
        int i;

        for (y=0; y<h; y++) {
            pcx_rle_decode(&gb, scanline, bytes_per_scanline, compressed);

            for (x=0; x<w; x++) {
                int m = 0x80 >> (x&7), v = 0;
                for (i=nplanes - 1; i>=0; i--) {
                    v <<= 1;
                    v  += !!(scanline[i*bytes_per_line + (x>>3)] & m);
                }
                ptr[x] = v;
            }
            ptr += stride;
        }
    }

    ret = bytestream2_tell(&gb);
    if (nplanes == 1 && bits_per_pixel == 8) {
        pcx_palette(&gb, (uint32_t *) p->data[1], 256);
        ret += 256 * 3;
    } else if (bits_per_pixel * nplanes == 1) {
        AV_WN32A(p->data[1]  , 0xFF000000);
        AV_WN32A(p->data[1]+4, 0xFFFFFFFF);
    } else if (bits_per_pixel < 8) {
        bytestream2_seek(&gb, 16, SEEK_SET);
        pcx_palette(&gb, (uint32_t *) p->data[1], 16);
    }

    *got_frame = 1;

end:
    av_free(scanline);
    return ret;
}
Example #7
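/*
 * Decode a Pictor/PC Paint image: parse the header and the optional
 * extension block, build the CGA/EGA or truecolor palette, then RLE-decode
 * the bit planes bottom-up into a PAL8 frame.
 */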
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *data_size,
                        AVPacket *avpkt)
{
    PicContext *s = avctx->priv_data;
    uint32_t *palette;
    int bits_per_plane, bpp, etype, esize, npal, pos_after_pal;
    int i, x, y, plane, tmp;

    bytestream2_init(&s->g, avpkt->data, avpkt->size);

    if (bytestream2_get_bytes_left(&s->g) < 11)
        return AVERROR_INVALIDDATA;

    if (bytestream2_get_le16u(&s->g) != 0x1234)
        return AVERROR_INVALIDDATA;

    s->width       = bytestream2_get_le16u(&s->g);
    s->height      = bytestream2_get_le16u(&s->g);
    bytestream2_skip(&s->g, 4);
    tmp            = bytestream2_get_byteu(&s->g);
    bits_per_plane = tmp & 0xF;
    s->nb_planes   = (tmp >> 4) + 1;
    bpp            = bits_per_plane * s->nb_planes;
    if (bits_per_plane > 8 || bpp < 1 || bpp > 32) {
        av_log_ask_for_sample(s, "unsupported bit depth\n");
        return AVERROR_INVALIDDATA;
    }

    if (bytestream2_peek_byte(&s->g) == 0xFF) {
        bytestream2_skip(&s->g, 2);
        etype = bytestream2_get_le16(&s->g);
        esize = bytestream2_get_le16(&s->g);
        if (bytestream2_get_bytes_left(&s->g) < esize)
            return AVERROR_INVALIDDATA;
    } else {
        etype = -1;
        esize = 0;
    }

    avctx->pix_fmt = PIX_FMT_PAL8;

    if (s->width != avctx->width && s->height != avctx->height) {
        if (av_image_check_size(s->width, s->height, 0, avctx) < 0)
            return -1;
        avcodec_set_dimensions(avctx, s->width, s->height);
        if (s->frame.data[0])
            avctx->release_buffer(avctx, &s->frame);
    }

    if (avctx->get_buffer(avctx, &s->frame) < 0){
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }
    memset(s->frame.data[0], 0, s->height * s->frame.linesize[0]);
    s->frame.pict_type           = AV_PICTURE_TYPE_I;
    s->frame.palette_has_changed = 1;

    pos_after_pal = bytestream2_tell(&s->g) + esize;
    palette = (uint32_t*)s->frame.data[1];
    if (etype == 1 && esize > 1 && bytestream2_peek_byte(&s->g) < 6) {
        int idx = bytestream2_get_byte(&s->g);
        npal = 4;
        for (i = 0; i < npal; i++)
            palette[i] = ff_cga_palette[ cga_mode45_index[idx][i] ];
    } else if (etype == 2) {
        npal = FFMIN(esize, 16);
        for (i = 0; i < npal; i++) {
            int pal_idx = bytestream2_get_byte(&s->g);
            palette[i]  = ff_cga_palette[FFMIN(pal_idx, 16)];
        }
    } else if (etype == 3) {
        npal = FFMIN(esize, 16);
        for (i = 0; i < npal; i++) {
            int pal_idx = bytestream2_get_byte(&s->g);
            palette[i]  = ff_ega_palette[FFMIN(pal_idx, 63)];
        }
    } else if (etype == 4 || etype == 5) {
        npal = FFMIN(esize / 3, 256);
        for (i = 0; i < npal; i++)
            palette[i] = bytestream2_get_be24(&s->g) << 2;
    } else {
        if (bpp == 1) {
            npal = 2;
            palette[0] = 0x000000;
            palette[1] = 0xFFFFFF;
        } else if (bpp == 2) {
            npal = 4;
            for (i = 0; i < npal; i++)
                palette[i] = ff_cga_palette[ cga_mode45_index[0][i] ];
        } else {
            npal = 16;
            memcpy(palette, ff_cga_palette, npal * 4);
        }
    }
    // fill remaining palette entries
    memset(palette + npal, 0, AVPALETTE_SIZE - npal * 4);
    // skip remaining palette bytes
    bytestream2_seek(&s->g, pos_after_pal, SEEK_SET);

    x = 0;
    y = s->height - 1;
    plane = 0;
    if (bytestream2_get_le16(&s->g)) {
        while (bytestream2_get_bytes_left(&s->g) >= 6) {
            int stop_size, marker, t1, t2;

            t1        = bytestream2_get_bytes_left(&s->g);
            t2        = bytestream2_get_le16(&s->g);
            stop_size = t1 - FFMIN(t1, t2);
            // ignore uncompressed block size
            bytestream2_skip(&s->g, 2);
            marker    = bytestream2_get_byte(&s->g);

            while (plane < s->nb_planes &&
                   bytestream2_get_bytes_left(&s->g) > stop_size) {
                int run = 1;
                int val = bytestream2_get_byte(&s->g);
                if (val == marker) {
                    run = bytestream2_get_byte(&s->g);
                    if (run == 0)
                        run = bytestream2_get_le16(&s->g);
                    val = bytestream2_get_byte(&s->g);
                }
                if (!bytestream2_get_bytes_left(&s->g))
                    break;

                if (bits_per_plane == 8) {
                    picmemset_8bpp(s, val, run, &x, &y);
                    if (y < 0)
                        break;
                } else {
                    picmemset(s, val, run, &x, &y, &plane, bits_per_plane);
                }
            }
        }
    } else {
        av_log_ask_for_sample(s, "uncompressed image\n");
        return avpkt->size;
    }

    *data_size = sizeof(AVFrame);
    *(AVFrame*)data = s->frame;
    return avpkt->size;
}
Example #8
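/*
 * Decode an SGI image: validate the magic number and header fields, pick a
 * grayscale/RGB/RGBA pixel format from the depth and bytes per channel,
 * then read the pixel data either RLE-compressed or uncompressed.
 */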
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame,
                        AVPacket *avpkt)
{
    SgiState *s = avctx->priv_data;
    AVFrame *p = data;
    unsigned int dimension, rle;
    int ret = 0;
    uint8_t *out_buf, *out_end;

    bytestream2_init(&s->g, avpkt->data, avpkt->size);
    if (bytestream2_get_bytes_left(&s->g) < SGI_HEADER_SIZE) {
        av_log(avctx, AV_LOG_ERROR, "buf_size too small (%d)\n", avpkt->size);
        return AVERROR_INVALIDDATA;
    }

    /* Test for SGI magic. */
    if (bytestream2_get_be16(&s->g) != SGI_MAGIC) {
        av_log(avctx, AV_LOG_ERROR, "bad magic number\n");
        return AVERROR_INVALIDDATA;
    }

    rle                  = bytestream2_get_byte(&s->g);
    s->bytes_per_channel = bytestream2_get_byte(&s->g);
    dimension            = bytestream2_get_be16(&s->g);
    s->width             = bytestream2_get_be16(&s->g);
    s->height            = bytestream2_get_be16(&s->g);
    s->depth             = bytestream2_get_be16(&s->g);

    if (s->bytes_per_channel != 1 && s->bytes_per_channel != 2) {
        av_log(avctx, AV_LOG_ERROR, "wrong channel number\n");
        return -1;
    }

    /* Check for supported image dimensions. */
    if (dimension != 2 && dimension != 3) {
        av_log(avctx, AV_LOG_ERROR, "wrong dimension number\n");
        return -1;
    }

    if (s->depth == SGI_GRAYSCALE) {
        avctx->pix_fmt = s->bytes_per_channel == 2 ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY8;
    } else if (s->depth == SGI_RGB) {
        avctx->pix_fmt = s->bytes_per_channel == 2 ? AV_PIX_FMT_RGB48BE : AV_PIX_FMT_RGB24;
    } else if (s->depth == SGI_RGBA) {
        avctx->pix_fmt = s->bytes_per_channel == 2 ? AV_PIX_FMT_RGBA64BE : AV_PIX_FMT_RGBA;
    } else {
        av_log(avctx, AV_LOG_ERROR, "wrong picture format\n");
        return -1;
    }

    ret = ff_set_dimensions(avctx, s->width, s->height);
    if (ret < 0)
        return ret;

    if (ff_get_buffer(avctx, p, 0) < 0) {
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed.\n");
        return -1;
    }

    p->pict_type = AV_PICTURE_TYPE_I;
    p->key_frame = 1;
    out_buf = p->data[0];

    out_end = out_buf + p->linesize[0] * s->height;

    s->linesize = p->linesize[0];

    /* Skip header. */
    bytestream2_seek(&s->g, SGI_HEADER_SIZE, SEEK_SET);
    if (rle) {
        ret = read_rle_sgi(out_end, s);
    } else {
        ret = read_uncompressed_sgi(out_buf, s);
    }

    if (ret == 0) {
        *got_frame = 1;
        return avpkt->size;
    } else {
        return ret;
    }
}
Example #9
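/*
 * Read a RedSpark file header: decrypt the header in place, fill in the
 * ADPCM THP codec parameters (sample rate, channel count, duration) and
 * copy the per-channel ADPCM coefficient tables into the extradata.
 */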
static int redspark_read_header(AVFormatContext *s)
{
    AVIOContext *pb = s->pb;
    RedSparkContext *redspark = s->priv_data;
    AVCodecParameters *par;
    GetByteContext gbc;
    int i, coef_off, ret = 0;
    uint32_t key, data;
    uint8_t header[HEADER_SIZE];
    AVStream *st;

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);
    par = st->codecpar;

    /* Decrypt header */
    data = avio_rb32(pb);
    key  = data ^ 0x52656453;
    data ^= key;
    AV_WB32(header, data);
    key = rol(key, 11);

    for (i = 4; i < HEADER_SIZE; i += 4) {
        key += rol(key, 3);
        data = avio_rb32(pb) ^ key;
        AV_WB32(header + i, data);
    }

    par->codec_id    = AV_CODEC_ID_ADPCM_THP;
    par->codec_type  = AVMEDIA_TYPE_AUDIO;

    bytestream2_init(&gbc, header, HEADER_SIZE);
    bytestream2_seek(&gbc, 0x3c, SEEK_SET);
    par->sample_rate = bytestream2_get_be32u(&gbc);
    if (par->sample_rate <= 0 || par->sample_rate > 96000) {
        av_log(s, AV_LOG_ERROR, "Invalid sample rate: %d\n", par->sample_rate);
        return AVERROR_INVALIDDATA;
    }

    st->duration = bytestream2_get_be32u(&gbc) * 14;
    redspark->samples_count = 0;
    bytestream2_skipu(&gbc, 10);
    par->channels = bytestream2_get_byteu(&gbc);
    if (!par->channels) {
        return AVERROR_INVALIDDATA;
    }

    coef_off = 0x54 + par->channels * 8;
    if (bytestream2_get_byteu(&gbc)) // Loop flag
        coef_off += 16;

    if (coef_off + par->channels * (32 + 14) > HEADER_SIZE) {
        return AVERROR_INVALIDDATA;
    }

    if (ff_alloc_extradata(par, 32 * par->channels)) {
        return AVERROR_INVALIDDATA;
    }

    /* Get the ADPCM table */
    bytestream2_seek(&gbc, coef_off, SEEK_SET);
    for (i = 0; i < par->channels; i++) {
        if (bytestream2_get_bufferu(&gbc, par->extradata + i * 32, 32) != 32) {
            return AVERROR_INVALIDDATA;
        }
        bytestream2_skipu(&gbc, 14);
    }

    avpriv_set_pts_info(st, 64, 1, par->sample_rate);

    return ret;
}