/* To be a valid APNG file, we mandate, in this order:
 *     PNGSIG
 *     IHDR
 *     ...
 *     acTL
 *     ...
 *     IDAT
 */
static int apng_probe(AVProbeData *p)
{
    GetByteContext gb;
    int state = 0;
    uint32_t len, tag;

    bytestream2_init(&gb, p->buf, p->buf_size);

    if (bytestream2_get_be64(&gb) != PNGSIG)
        return 0;

    for (;;) {
        len = bytestream2_get_be32(&gb);
        if (len > 0x7fffffff)
            return 0;

        tag = bytestream2_get_le32(&gb);
        /* we don't check IDAT size, as this is the last tag
         * we check, and it may be larger than the probe buffer */
        if (tag != MKTAG('I', 'D', 'A', 'T') &&
            len > bytestream2_get_bytes_left(&gb))
            return 0;

        switch (tag) {
        case MKTAG('I', 'H', 'D', 'R'):
            if (len != 13)
                return 0;
            if (av_image_check_size(bytestream2_get_be32(&gb),
                                    bytestream2_get_be32(&gb), 0, NULL))
                return 0;
            bytestream2_skip(&gb, 9);
            state++;
            break;
        case MKTAG('a', 'c', 'T', 'L'):
            if (state != 1 ||
                len != 8 ||
                bytestream2_get_be32(&gb) == 0) /* 0 is not a valid value for number of frames */
                return 0;
            bytestream2_skip(&gb, 8);
            state++;
            break;
        case MKTAG('I', 'D', 'A', 'T'):
            if (state != 2)
                return 0;
            goto end;
        default:
            /* skip other tags */
            bytestream2_skip(&gb, len + 4);
            break;
        }
    }

end:
    return AVPROBE_SCORE_MAX;
}
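/* Hedged sketch (illustration only, not part of the original source): how a
 * probe function like apng_probe() is typically exposed to libavformat.  The
 * structure name below is made up and the field list is deliberately minimal;
 * the field names and the meaning of the probe score follow the public
 * AVInputFormat API. */
static const AVInputFormat apng_demuxer_sketch = {
    .name       = "apng",
    .long_name  = "Animated Portable Network Graphics",
    .read_probe = apng_probe, /* AVPROBE_SCORE_MAX only when PNGSIG, IHDR and acTL all check out */
};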
/**
 * Read a run length encoded SGI image.
 * @param out_buf output buffer
 * @param s the current image state
 * @return 0 if no error, else return error number.
 */
static int read_rle_sgi(uint8_t *out_buf, SgiState *s)
{
    uint8_t *dest_row;
    unsigned int len = s->height * s->depth * 4;
    GetByteContext g_table = s->g;
    unsigned int y, z;
    unsigned int start_offset;
    int linesize, ret;

    /* size of RLE offset and length tables */
    if (len * 2 > bytestream2_get_bytes_left(&s->g)) {
        return AVERROR_INVALIDDATA;
    }

    for (z = 0; z < s->depth; z++) {
        dest_row = out_buf;
        for (y = 0; y < s->height; y++) {
            linesize = s->width * s->depth * s->bytes_per_channel;
            dest_row -= s->linesize;
            start_offset = bytestream2_get_be32(&g_table);
            bytestream2_seek(&s->g, start_offset, SEEK_SET);
            if (s->bytes_per_channel == 1)
                ret = expand_rle_row8(s, dest_row + z, linesize, s->depth);
            else
                ret = expand_rle_row16(s, (uint16_t *)dest_row + z, linesize, s->depth);
            if (ret != s->width)
                return AVERROR_INVALIDDATA;
        }
    }
    return 0;
}
/**
 * Read a run length encoded SGI image.
 * @param out_buf output buffer
 * @param s the current image state
 * @return 0 if no error, else return error number.
 */
static int read_rle_sgi(uint8_t *out_buf, SgiState *s)
{
    uint8_t *dest_row;
    unsigned int len = s->height * s->depth * 4;
    GetByteContext g_table = s->g;
    unsigned int y, z;
    unsigned int start_offset;

    /* size of RLE offset and length tables */
    if (len * 2 > bytestream2_get_bytes_left(&s->g)) {
        return AVERROR_INVALIDDATA;
    }

    for (z = 0; z < s->depth; z++) {
        dest_row = out_buf;
        for (y = 0; y < s->height; y++) {
            dest_row -= s->linesize;
            start_offset = bytestream2_get_be32(&g_table);
            bytestream2_seek(&s->g, start_offset, SEEK_SET);
            if (expand_rle_row(s, dest_row + z, dest_row + FFABS(s->linesize),
                               s->depth) != s->width) {
                return AVERROR_INVALIDDATA;
            }
        }
    }
    return 0;
}
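/* Illustrative note (inferred from the two read_rle_sgi variants above, not
 * stated in the source): an RLE SGI file begins with height * depth 32-bit
 * big-endian row start offsets followed by an equally sized table of row
 * lengths, which is why the bounds check requires len * 2 bytes.  g_table
 * walks the offset table while s->g is seeked to each row's compressed data
 * before the row is expanded. */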
static int hqa_decode_frame(HQContext *ctx, AVFrame *pic, size_t data_size)
{
    GetBitContext gb;
    const int num_slices = 8;
    uint32_t slice_off[9];
    int i, slice, ret;
    int width, height, quant;
    const uint8_t *src = ctx->gbc.buffer;

    width  = bytestream2_get_be16(&ctx->gbc);
    height = bytestream2_get_be16(&ctx->gbc);

    ctx->avctx->coded_width         = FFALIGN(width, 16);
    ctx->avctx->coded_height        = FFALIGN(height, 16);
    ctx->avctx->width               = width;
    ctx->avctx->height              = height;
    ctx->avctx->bits_per_raw_sample = 8;
    ctx->avctx->pix_fmt             = AV_PIX_FMT_YUVA422P;

    av_log(ctx->avctx, AV_LOG_VERBOSE, "HQA Profile\n");

    quant = bytestream2_get_byte(&ctx->gbc);
    bytestream2_skip(&ctx->gbc, 3);
    if (quant >= NUM_HQ_QUANTS) {
        av_log(ctx->avctx, AV_LOG_ERROR,
               "Invalid quantization matrix %d.\n", quant);
        return AVERROR_INVALIDDATA;
    }

    ret = ff_get_buffer(ctx->avctx, pic, 0);
    if (ret < 0) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Could not allocate buffer.\n");
        return ret;
    }

    /* Offsets are stored from HQA1 position, so adjust them accordingly. */
    for (i = 0; i < num_slices + 1; i++)
        slice_off[i] = bytestream2_get_be32(&ctx->gbc) - 4;

    for (slice = 0; slice < num_slices; slice++) {
        if (slice_off[slice] < (num_slices + 1) * 3 ||
            slice_off[slice] >= slice_off[slice + 1] ||
            slice_off[slice + 1] > data_size) {
            av_log(ctx->avctx, AV_LOG_ERROR,
                   "Invalid slice size %zu.\n", data_size);
            break;
        }
        init_get_bits(&gb, src + slice_off[slice],
                      (slice_off[slice + 1] - slice_off[slice]) * 8);

        ret = hqa_decode_slice(ctx, pic, &gb, quant, slice, width, height);
        if (ret < 0)
            return ret;
    }

    return 0;
}
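/* Illustrative note (inferred from the code above, not stated in the source):
 * the nine 32-bit values read into slice_off[] delimit eight slices, so slice
 * i is decoded from src + slice_off[i] with a length of
 * slice_off[i + 1] - slice_off[i] bytes; the -4 adjustment converts offsets
 * stored relative to the HQA1 tag into offsets into src, per the comment in
 * the function. */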
static int tm2_read_stream(TM2Context *ctx, const uint8_t *buf, int stream_id,
                           int buf_size)
{
    int i, ret;
    int skip = 0;
    int len, toks, pos;
    TM2Codes codes;
    GetByteContext gb;

    if (buf_size < 4) {
        av_log(ctx->avctx, AV_LOG_ERROR, "not enough space for len left\n");
        return AVERROR_INVALIDDATA;
    }

    /* get stream length in dwords */
    bytestream2_init(&gb, buf, buf_size);
    len  = bytestream2_get_be32(&gb);
    skip = len * 4 + 4;

    if (len == 0)
        return 4;

    if (len >= INT_MAX / 4 - 1 || len < 0 || skip > buf_size) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Error, invalid stream size.\n");
        return AVERROR_INVALIDDATA;
    }

    toks = bytestream2_get_be32(&gb);
    if (toks & 1) {
        len = bytestream2_get_be32(&gb);
        if (len == TM2_ESCAPE) {
            len = bytestream2_get_be32(&gb);
        }
        if (len > 0) {
            pos = bytestream2_tell(&gb);
            if (skip <= pos)
                return AVERROR_INVALIDDATA;
            init_get_bits(&ctx->gb, buf + pos, (skip - pos) * 8);
            if ((ret = tm2_read_deltas(ctx, stream_id)) < 0)
                return ret;
            bytestream2_skip(&gb, ((get_bits_count(&ctx->gb) + 31) >> 5) << 2);
        }
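/* Worked example (illustrative, not from the original source): the length
 * field counts 32-bit dwords of payload, so a stream with len == 3 occupies
 * skip == 3 * 4 + 4 == 16 bytes of the packet -- the 4-byte length field
 * itself plus 12 bytes of data.  The skip > buf_size check above therefore
 * rejects streams that claim more payload than the packet actually holds. */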
static int pix_decode_header(PixHeader *out, GetByteContext *pgb)
{
    unsigned int header_len = bytestream2_get_be32(pgb);

    out->format = bytestream2_get_byte(pgb);
    bytestream2_skip(pgb, 2);
    out->width  = bytestream2_get_be16(pgb);
    out->height = bytestream2_get_be16(pgb);

    // the header is at least 11 bytes long; we read the first 7
    if (header_len < 11)
        return AVERROR_INVALIDDATA;

    // skip the rest of the header
    bytestream2_skip(pgb, header_len - 7);

    return 0;
}
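/* Layout note (inferred from the reads above, not stated in the source): the
 * 32-bit big-endian header_len prefix counts the bytes that follow it, of
 * which this function consumes 7:
 *   byte 0      format
 *   bytes 1-2   skipped (unused here)
 *   bytes 3-4   width  (big-endian)
 *   bytes 5-6   height (big-endian)
 * The remaining header_len - 7 bytes of the header are skipped. */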
static int pix_decode_frame(AVCodecContext *avctx, void *data,
                            int *got_frame, AVPacket *avpkt)
{
    AVFrame *frame = data;

    int ret, i;
    GetByteContext gb;

    unsigned int bytes_pp;
    unsigned int magic[4];
    unsigned int chunk_type;
    unsigned int data_len;
    unsigned int bytes_per_scanline;
    unsigned int bytes_left;
    PixHeader hdr;

    bytestream2_init(&gb, avpkt->data, avpkt->size);

    magic[0] = bytestream2_get_be32(&gb);
    magic[1] = bytestream2_get_be32(&gb);
    magic[2] = bytestream2_get_be32(&gb);
    magic[3] = bytestream2_get_be32(&gb);

    if (magic[0] != 0x12 || magic[1] != 0x08 ||
        magic[2] != 0x02 || magic[3] != 0x02) {
        av_log(avctx, AV_LOG_ERROR, "Not a BRender PIX file.\n");
        return AVERROR_INVALIDDATA;
    }

    chunk_type = bytestream2_get_be32(&gb);
    if (chunk_type != HEADER1_CHUNK && chunk_type != HEADER2_CHUNK) {
        av_log(avctx, AV_LOG_ERROR, "Invalid chunk type %d.\n", chunk_type);
        return AVERROR_INVALIDDATA;
    }

    ret = pix_decode_header(&hdr, &gb);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid header length.\n");
        return ret;
    }
    switch (hdr.format) {
    case 3:
        avctx->pix_fmt = AV_PIX_FMT_PAL8;
        bytes_pp = 1;
        break;
    case 4:
        avctx->pix_fmt = AV_PIX_FMT_RGB555BE;
        bytes_pp = 2;
        break;
    case 5:
        avctx->pix_fmt = AV_PIX_FMT_RGB565BE;
        bytes_pp = 2;
        break;
    case 6:
        avctx->pix_fmt = AV_PIX_FMT_RGB24;
        bytes_pp = 3;
        break;
    case 7:
        avctx->pix_fmt = AV_PIX_FMT_0RGB;
        bytes_pp = 4;
        break;
    case 8: // ARGB
        avctx->pix_fmt = AV_PIX_FMT_ARGB;
        bytes_pp = 4;
        break;
    case 18:
        avctx->pix_fmt = AV_PIX_FMT_Y400A;
        bytes_pp = 2;
        break;
    default:
        avpriv_request_sample(avctx, "Format %d", hdr.format);
        return AVERROR_PATCHWELCOME;
    }

    if ((ret = ff_set_dimensions(avctx, hdr.width, hdr.height)) < 0)
        return ret;
    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)
        return ret;

    chunk_type = bytestream2_get_be32(&gb);

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8 &&
        (chunk_type == HEADER1_CHUNK || chunk_type == HEADER2_CHUNK)) {
        /* read palette data from data[1] */
        PixHeader palhdr;
        uint32_t *pal_out = (uint32_t *)frame->data[1];

        ret = pix_decode_header(&palhdr, &gb);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Invalid palette header length.\n");
            return ret;
        }
        if (palhdr.format != 7)
            avpriv_request_sample(avctx, "Palette not in RGB format");

        chunk_type = bytestream2_get_be32(&gb);
        data_len   = bytestream2_get_be32(&gb);
        bytestream2_skip(&gb, 8);
        if (chunk_type != IMAGE_DATA_CHUNK || data_len != 1032 ||
            bytestream2_get_bytes_left(&gb) < 1032) {
            av_log(avctx, AV_LOG_ERROR, "Invalid palette data.\n");
            return AVERROR_INVALIDDATA;
        }
        // palette data is surrounded by 8 null bytes (both top and bottom)
        // convert 0RGB to machine endian format (ARGB32)
        for (i = 0; i < 256; ++i)
            *pal_out++ = (0xFFU << 24) | bytestream2_get_be32u(&gb);
        bytestream2_skip(&gb, 8);

        frame->palette_has_changed = 1;

        chunk_type = bytestream2_get_be32(&gb);
    } else if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        /* no palette supplied, use the default one */
        uint32_t *pal_out = (uint32_t *)frame->data[1];

        // TODO: add an AVOption to load custom palette files
        av_log(avctx, AV_LOG_WARNING,
               "Using default palette, colors might be off.\n");
        memcpy(pal_out, std_pal_table, sizeof(uint32_t) * 256);

        frame->palette_has_changed = 1;
    }

    data_len = bytestream2_get_be32(&gb);
    bytestream2_skip(&gb, 8);

    // read the image data to the buffer
    bytes_per_scanline = bytes_pp * hdr.width;
    bytes_left         = bytestream2_get_bytes_left(&gb);

    if (chunk_type != IMAGE_DATA_CHUNK || data_len != bytes_left ||
        bytes_left / bytes_per_scanline < hdr.height) {
        av_log(avctx, AV_LOG_ERROR, "Invalid image data.\n");
        return AVERROR_INVALIDDATA;
    }

    av_image_copy_plane(frame->data[0], frame->linesize[0],
                        avpkt->data + bytestream2_tell(&gb),
                        bytes_per_scanline, bytes_per_scanline, hdr.height);

    frame->pict_type = AV_PICTURE_TYPE_I;
    frame->key_frame = 1;
    *got_frame       = 1;

    return avpkt->size;
}
unsigned ff_tget_long(GetByteContext *gb, int le)
{
    unsigned v = le ? bytestream2_get_le32(gb) : bytestream2_get_be32(gb);
    return v;
}
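/* Hedged companion sketch (assumption, not from the original source): the
 * same little/big-endian toggle applied to a 16-bit field, as a TIFF-style
 * reader would need alongside the 32-bit helper above.  The function name is
 * made up for illustration. */
static unsigned tget_short_sketch(GetByteContext *gb, int le)
{
    return le ? bytestream2_get_le16(gb) : bytestream2_get_be16(gb);
}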
static void init_rangecoder(RangeCoder *rc, GetByteContext *gb)
{
    rc->code1 = 0;
    rc->range = 0xFFFFFFFFU;
    rc->code  = bytestream2_get_be32(gb);
}
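/* Minimal usage sketch (assumption, not from the original source): wrapping a
 * packet payload in a GetByteContext before handing it to the range-coder
 * initializer above, which consumes the first four bytes as code bits.  The
 * wrapper function name is made up for illustration. */
static void init_rangecoder_demo(RangeCoder *rc, const uint8_t *buf, int size)
{
    GetByteContext gb;

    bytestream2_init(&gb, buf, size); /* read-only view of the buffer */
    init_rangecoder(rc, &gb);         /* rc->code now holds the first 32 bits */
}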
static int brpix_decode_frame(AVCodecContext *avctx,
                              void *data, int *got_frame,
                              AVPacket *avpkt)
{
    BRPixContext *s = avctx->priv_data;
    AVFrame *frame_out = data;

    int ret;
    GetByteContext gb;

    unsigned int bytes_pp;

    unsigned int magic[4];
    unsigned int chunk_type;
    unsigned int data_len;
    BRPixHeader hdr;

    bytestream2_init(&gb, avpkt->data, avpkt->size);

    magic[0] = bytestream2_get_be32(&gb);
    magic[1] = bytestream2_get_be32(&gb);
    magic[2] = bytestream2_get_be32(&gb);
    magic[3] = bytestream2_get_be32(&gb);

    if (magic[0] != 0x12 || magic[1] != 0x8 ||
        magic[2] != 0x2 || magic[3] != 0x2) {
        av_log(avctx, AV_LOG_ERROR, "Not a BRender PIX file\n");
        return AVERROR_INVALIDDATA;
    }

    chunk_type = bytestream2_get_be32(&gb);
    if (chunk_type != 0x3 && chunk_type != 0x3d) {
        av_log(avctx, AV_LOG_ERROR, "Invalid chunk type %d\n", chunk_type);
        return AVERROR_INVALIDDATA;
    }

    ret = brpix_decode_header(&hdr, &gb);
    if (!ret) {
        av_log(avctx, AV_LOG_ERROR, "Invalid header length\n");
        return AVERROR_INVALIDDATA;
    }
    switch (hdr.format) {
    case 3:
        avctx->pix_fmt = AV_PIX_FMT_PAL8;
        bytes_pp = 1;
        break;
    case 4:
        avctx->pix_fmt = AV_PIX_FMT_RGB555BE;
        bytes_pp = 2;
        break;
    case 5:
        avctx->pix_fmt = AV_PIX_FMT_RGB565BE;
        bytes_pp = 2;
        break;
    case 6:
        avctx->pix_fmt = AV_PIX_FMT_RGB24;
        bytes_pp = 3;
        break;
    case 7:
        avctx->pix_fmt = AV_PIX_FMT_0RGB;
        bytes_pp = 4;
        break;
    case 18:
        avctx->pix_fmt = AV_PIX_FMT_GRAY8A;
        bytes_pp = 2;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Format %d is not supported\n",
               hdr.format);
        return AVERROR_PATCHWELCOME;
    }

    if (s->frame.data[0])
        avctx->release_buffer(avctx, &s->frame);

    if (av_image_check_size(hdr.width, hdr.height, 0, avctx) < 0)
        return AVERROR_INVALIDDATA;

    if (hdr.width != avctx->width || hdr.height != avctx->height)
        avcodec_set_dimensions(avctx, hdr.width, hdr.height);

    if ((ret = ff_get_buffer(avctx, &s->frame)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return ret;
    }

    chunk_type = bytestream2_get_be32(&gb);

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8 &&
        (chunk_type == 0x3 || chunk_type == 0x3d)) {
        BRPixHeader palhdr;
        uint32_t *pal_out = (uint32_t *)s->frame.data[1];
        int i;

        ret = brpix_decode_header(&palhdr, &gb);
        if (!ret) {
            av_log(avctx, AV_LOG_ERROR, "Invalid palette header length\n");
            return AVERROR_INVALIDDATA;
        }
        if (palhdr.format != 7) {
            av_log(avctx, AV_LOG_ERROR, "Palette is not in 0RGB format\n");
            return AVERROR_INVALIDDATA;
        }

        chunk_type = bytestream2_get_be32(&gb);
        data_len   = bytestream2_get_be32(&gb);
        bytestream2_skip(&gb, 8);
        if (chunk_type != 0x21 || data_len != 1032 ||
            bytestream2_get_bytes_left(&gb) < 1032) {
            av_log(avctx, AV_LOG_ERROR, "Invalid palette data\n");
            return AVERROR_INVALIDDATA;
        }

        // convert 0RGB to machine endian format (ARGB32)
        for (i = 0; i < 256; ++i) {
            bytestream2_skipu(&gb, 1);
            *pal_out++ = (0xFFU << 24) | bytestream2_get_be24u(&gb);
        }

        bytestream2_skip(&gb, 8);

        s->frame.palette_has_changed = 1;

        chunk_type = bytestream2_get_be32(&gb);
    }

    data_len = bytestream2_get_be32(&gb);
    bytestream2_skip(&gb, 8);

    // read the image data to the buffer
    {
        unsigned int bytes_per_scanline = bytes_pp * hdr.width;
        unsigned int bytes_left = bytestream2_get_bytes_left(&gb);

        if (chunk_type != 0x21 || data_len != bytes_left ||
            bytes_left / bytes_per_scanline < hdr.height) {
            av_log(avctx, AV_LOG_ERROR, "Invalid image data\n");
            return AVERROR_INVALIDDATA;
        }

        av_image_copy_plane(s->frame.data[0], s->frame.linesize[0],
                            avpkt->data + bytestream2_tell(&gb),
                            bytes_per_scanline,
                            bytes_per_scanline, hdr.height);
    }

    *frame_out = s->frame;
    *got_frame = 1;

    return avpkt->size;
}
static int decode_frame(AVCodecContext *avctx, void *data,
                        int *got_frame, AVPacket *avpkt)
{
    AVFrame * const p = data;
    GetByteContext gbc;
    int colors;
    int w, h, ret;
    int ver;

    bytestream2_init(&gbc, avpkt->data, avpkt->size);
    if (   bytestream2_get_bytes_left(&gbc) >= 552
        && check_header(gbc.buffer + 512, bytestream2_get_bytes_left(&gbc) - 512))
        bytestream2_skip(&gbc, 512);

    ver = check_header(gbc.buffer, bytestream2_get_bytes_left(&gbc));

    /* smallest PICT header */
    if (bytestream2_get_bytes_left(&gbc) < 40) {
        av_log(avctx, AV_LOG_ERROR, "Frame is too small %d\n",
               bytestream2_get_bytes_left(&gbc));
        return AVERROR_INVALIDDATA;
    }

    bytestream2_skip(&gbc, 6);
    h = bytestream2_get_be16(&gbc);
    w = bytestream2_get_be16(&gbc);

    ret = ff_set_dimensions(avctx, w, h);
    if (ret < 0)
        return ret;

    /* version 1 is identified by 0x1101
     * it uses byte-aligned opcodes rather than word-aligned */
    if (ver == 1) {
        avpriv_request_sample(avctx, "QuickDraw version 1");
        return AVERROR_PATCHWELCOME;
    } else if (ver != 2) {
        avpriv_request_sample(avctx, "QuickDraw version unknown (%X)",
                              bytestream2_get_be32(&gbc));
        return AVERROR_PATCHWELCOME;
    }

    bytestream2_skip(&gbc, 4+26);

    while (bytestream2_get_bytes_left(&gbc) >= 4) {
        int bppcnt, bpp;
        int rowbytes, pack_type;
        int opcode = bytestream2_get_be16(&gbc);

        switch(opcode) {
        case PACKBITSRECT:
        case PACKBITSRGN:
            av_log(avctx, AV_LOG_DEBUG, "Parsing Packbit opcode\n");

            bytestream2_skip(&gbc, 30);
            bppcnt = bytestream2_get_be16(&gbc); /* cmpCount */
            bpp    = bytestream2_get_be16(&gbc); /* cmpSize */

            av_log(avctx, AV_LOG_DEBUG, "bppcount %d bpp %d\n", bppcnt, bpp);
            if (bppcnt == 1 && bpp == 8) {
                avctx->pix_fmt = AV_PIX_FMT_PAL8;
            } else {
                av_log(avctx, AV_LOG_ERROR,
                       "Invalid pixel format (bppcnt %d bpp %d) in Packbit\n",
                       bppcnt, bpp);
                return AVERROR_INVALIDDATA;
            }

            /* jump to palette */
            bytestream2_skip(&gbc, 18);
            colors = bytestream2_get_be16(&gbc);

            if (colors < 0 || colors > 256) {
                av_log(avctx, AV_LOG_ERROR,
                       "Error color count - %i(0x%X)\n", colors, colors);
                return AVERROR_INVALIDDATA;
            }
            if (bytestream2_get_bytes_left(&gbc) < (colors + 1) * 8) {
                av_log(avctx, AV_LOG_ERROR, "Palette is too small %d\n",
                       bytestream2_get_bytes_left(&gbc));
                return AVERROR_INVALIDDATA;
            }
            if ((ret = ff_get_buffer(avctx, p, 0)) < 0)
                return ret;

            parse_palette(avctx, &gbc, (uint32_t *)p->data[1], colors);
            p->palette_has_changed = 1;

            /* jump to image data */
            bytestream2_skip(&gbc, 18);

            if (opcode == PACKBITSRGN) {
                bytestream2_skip(&gbc, 2 + 8); /* size + rect */
                avpriv_report_missing_feature(avctx, "Packbit mask region");
            }

            ret = decode_rle(avctx, p, &gbc, bppcnt);
            if (ret < 0)
                return ret;
            *got_frame = 1;
            break;
        case DIRECTBITSRECT:
        case DIRECTBITSRGN:
            av_log(avctx, AV_LOG_DEBUG, "Parsing Directbit opcode\n");

            bytestream2_skip(&gbc, 4);
            rowbytes = bytestream2_get_be16(&gbc) & 0x3FFF;
            if (rowbytes <= 250) {
                avpriv_report_missing_feature(avctx, "Short rowbytes");
                return AVERROR_PATCHWELCOME;
            }

            bytestream2_skip(&gbc, 10);
            pack_type = bytestream2_get_be16(&gbc);

            bytestream2_skip(&gbc, 16);
            bppcnt = bytestream2_get_be16(&gbc); /* cmpCount */
            bpp    = bytestream2_get_be16(&gbc); /* cmpSize */

            av_log(avctx, AV_LOG_DEBUG, "bppcount %d bpp %d\n", bppcnt, bpp);
            if (bppcnt == 3 && bpp == 8) {
                avctx->pix_fmt = AV_PIX_FMT_RGB24;
            } else if (bppcnt == 4 && bpp == 8) {
                avctx->pix_fmt = AV_PIX_FMT_ARGB;
            } else {
                av_log(avctx, AV_LOG_ERROR,
                       "Invalid pixel format (bppcnt %d bpp %d) in Directbit\n",
                       bppcnt, bpp);
                return AVERROR_INVALIDDATA;
            }

            /* set packing when default is selected */
            if (pack_type == 0)
                pack_type = bppcnt;

            if (pack_type != 3 && pack_type != 4) {
                avpriv_request_sample(avctx, "Pack type %d", pack_type);
                return AVERROR_PATCHWELCOME;
            }
            if ((ret = ff_get_buffer(avctx, p, 0)) < 0) {
                av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
                return ret;
            }

            /* jump to data */
            bytestream2_skip(&gbc, 30);

            if (opcode == DIRECTBITSRGN) {
                bytestream2_skip(&gbc, 2 + 8); /* size + rect */
                avpriv_report_missing_feature(avctx, "DirectBit mask region");
            }

            ret = decode_rle(avctx, p, &gbc, bppcnt);
            if (ret < 0)
                return ret;
            *got_frame = 1;
            break;
        default:
            av_log(avctx, AV_LOG_TRACE, "Unknown 0x%04X opcode\n", opcode);
            break;
        }
        /* exit the loop when a known pixel block has been found */
        if (*got_frame) {
            int eop, trail;

            /* re-align to a word */
            bytestream2_skip(&gbc, bytestream2_get_bytes_left(&gbc) % 2);

            eop   = bytestream2_get_be16(&gbc);
            trail = bytestream2_get_bytes_left(&gbc);
            if (eop != EOP)
                av_log(avctx, AV_LOG_WARNING,
                       "Missing end of picture opcode (found 0x%04X)\n", eop);
            if (trail)
                av_log(avctx, AV_LOG_WARNING, "Got %d trailing bytes\n", trail);
            break;
        }
    }

    if (*got_frame) {
        p->pict_type = AV_PICTURE_TYPE_I;
        p->key_frame = 1;

        return avpkt->size;
    } else {
        av_log(avctx, AV_LOG_ERROR, "Frame contained no usable data\n");

        return AVERROR_INVALIDDATA;
    }
}