unsigned avutil_version(void)
{
    static int checks_done;
    if (checks_done)
        return LIBAVUTIL_VERSION_INT;

    av_assert0(AV_PIX_FMT_VDA_VLD == 81); // check that the pix fmt enum has not had anything inserted or removed by mistake
    av_assert0(AV_SAMPLE_FMT_DBLP == 9);
    av_assert0(AVMEDIA_TYPE_ATTACHMENT == 4);
    av_assert0(AV_PICTURE_TYPE_BI == 7);
    av_assert0(LIBAVUTIL_VERSION_MICRO >= 100);
    av_assert0(HAVE_MMX2 == HAVE_MMXEXT);

    av_assert0(((size_t)-1) > 0); // C guarantees that size_t is unsigned; if this is false on a platform we care about, revert at least b284e1ffe343d6697fb950d1ee517bafda8a9844

    if (av_sat_dadd32(1, 2) != 5) {
        av_log(NULL, AV_LOG_FATAL, "Libavutil has been built with a broken binutils, please upgrade binutils and rebuild\n");
        abort();
    }

    if (llrint(LLN(1)<<60) != LLN(1)<<60) {
        av_log(NULL, AV_LOG_ERROR, "Libavutil has been linked to a broken llrint()\n");
    }

#if defined(ASSERT_LEVEL) && ASSERT_LEVEL > 0
    ff_check_pixfmt_descriptors();
#endif

    checks_done = 1;
    return LIBAVUTIL_VERSION_INT;
}
static int subviewer1_read_header(AVFormatContext *s)
{
    int delay = 0;
    AVPacket *sub = NULL;
    SubViewer1Context *subviewer1 = s->priv_data;
    AVStream *st = avformat_new_stream(s, NULL);

    if (!st)
        return AVERROR(ENOMEM);
    avpriv_set_pts_info(st, 64, 1, 1);
    st->codec->codec_type = AVMEDIA_TYPE_SUBTITLE;
    st->codec->codec_id   = AV_CODEC_ID_SUBVIEWER1;

    while (!avio_feof(s->pb)) {
        char line[4096];
        int len = ff_get_line(s->pb, line, sizeof(line));
        int hh, mm, ss;

        if (!len)
            break;

        if (!strncmp(line, "[DELAY]", 7)) {
            ff_get_line(s->pb, line, sizeof(line));
            sscanf(line, "%d", &delay);
        }

        if (sscanf(line, "[%d:%d:%d]", &hh, &mm, &ss) == 3) {
            const int64_t pos = avio_tell(s->pb);
            int64_t pts_start = hh*LLN(3600) + mm*LLN(60) + ss + delay;

            len = ff_get_line(s->pb, line, sizeof(line));
            line[strcspn(line, "\r\n")] = 0;
            if (!*line) {
                if (sub)
                    sub->duration = pts_start - sub->pts;
            } else {
                sub = ff_subtitles_queue_insert(&subviewer1->q, line, len, 0);
                if (!sub)
                    return AVERROR(ENOMEM);
                sub->pos = pos;
                sub->pts = pts_start;
                sub->duration = -1;
            }
        }
    }

    ff_subtitles_queue_finalize(&subviewer1->q);

    return 0;
}
static char *doubles2str(double *dp, int count, const char *sep)
{
    int i;
    char *ap, *ap0;
    uint64_t component_len;
    if (!sep) sep = ", ";
    component_len = LLN(24) + strlen(sep);
    if (count >= (INT_MAX - 1)/component_len)
        return NULL;
    ap = av_malloc(component_len * count + 1);
    if (!ap)
        return NULL;
    ap0   = ap;
    ap[0] = '\0';
    for (i = 0; i < count; i++) {
        unsigned l = snprintf(ap, component_len, "%.15g%s", dp[i], sep);
        if (l >= component_len) {
            av_free(ap0);
            return NULL;
        }
        ap += l;
    }
    ap0[strlen(ap0) - strlen(sep)] = '\0';
    return ap0;
}
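/*
 * Hypothetical usage sketch, not part of the original file: doubles2str()
 * returns a freshly allocated, separator-joined string (e.g. "1.5, 2.25, 3")
 * that the caller must release with av_free(); passing NULL as sep selects
 * the default ", " separator. The helper name below is illustrative only.
 */
static char *doubles2str_example(void)
{
    double v[3] = { 1.5, 2.25, 3.0 };
    return doubles2str(v, 3, NULL); /* caller owns the result and av_free()s it */
}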
static int sox_write_trailer(AVFormatContext *s)
{
    SoXContext *sox = s->priv_data;
    AVIOContext *pb = s->pb;
    AVCodecContext *enc = s->streams[0]->codec;

    if (s->pb->seekable) {
        /* update number of samples */
        int64_t file_size = avio_tell(pb);
        int64_t num_samples = (file_size - sox->header_size - LLN(4)) >> LLN(2);
        avio_seek(pb, 8, SEEK_SET);
        if (enc->codec_id == AV_CODEC_ID_PCM_S32LE) {
            avio_wl64(pb, num_samples);
        } else
            avio_wb64(pb, num_samples);
        avio_seek(pb, file_size, SEEK_SET);

        avio_flush(pb);
    }

    return 0;
}
int av_packet_merge_side_data(AVPacket *pkt){
    if(pkt->side_data_elems){
        AVBufferRef *buf;
        int i;
        uint8_t *p;
        uint64_t size= pkt->size + LLN(8) + FF_INPUT_BUFFER_PADDING_SIZE;
        AVPacket old= *pkt;
        for (i=0; i<old.side_data_elems; i++) {
            size += old.side_data[i].size + LLN(5);
        }
        if (size > INT_MAX)
            return AVERROR(EINVAL);
        buf = av_buffer_alloc(size);
        if (!buf)
            return AVERROR(ENOMEM);
        pkt->buf = buf;
        pkt->data = p = buf->data;
#if FF_API_DESTRUCT_PACKET
FF_DISABLE_DEPRECATION_WARNINGS
        pkt->destruct = dummy_destruct_packet;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        pkt->size = size - FF_INPUT_BUFFER_PADDING_SIZE;
        bytestream_put_buffer(&p, old.data, old.size);
        for (i=old.side_data_elems-1; i>=0; i--) {
            bytestream_put_buffer(&p, old.side_data[i].data, old.side_data[i].size);
            bytestream_put_be32(&p, old.side_data[i].size);
            *p++ = old.side_data[i].type | ((i==old.side_data_elems-1)*128);
        }
        bytestream_put_be64(&p, FF_MERGE_MARKER);
        av_assert0(p-pkt->data == pkt->size);
        memset(p, 0, FF_INPUT_BUFFER_PADDING_SIZE);
        av_free_packet(&old);
        pkt->side_data_elems = 0;
        pkt->side_data = NULL;
        return 1;
    }
    return 0;
}
static int gif_image_write_header(AVFormatContext *s, int width, int height,
                                  int loop_count, uint32_t *palette)
{
    AVIOContext *pb = s->pb;
    AVRational sar = s->streams[0]->codec->sample_aspect_ratio;
    int i;
    int64_t aspect = 0;

    if (sar.num > 0 && sar.den > 0) {
        aspect = sar.num * LLN(64) / sar.den - 15;
        if (aspect < 0 || aspect > 255)
            aspect = 0;
    }

    avio_write(pb, "GIF", 3);
    avio_write(pb, "89a", 3);
    avio_wl16(pb, width);
    avio_wl16(pb, height);

    if (palette) {
        avio_w8(pb, 0xf7); /* flags: global clut, 256 entries */
        avio_w8(pb, 0x1f); /* background color index */
        avio_w8(pb, aspect);
        for (i = 0; i < 256; i++) {
            const uint32_t v = palette[i] & 0xffffff;
            avio_wb24(pb, v);
        }
    } else {
        avio_w8(pb, 0); /* flags */
        avio_w8(pb, 0); /* background color index */
        avio_w8(pb, aspect);
    }

    if (loop_count >= 0 ) {
        /* "NETSCAPE EXTENSION" for looped animation GIF */
        avio_w8(pb, 0x21); /* GIF Extension code */
        avio_w8(pb, 0xff); /* Application Extension Label */
        avio_w8(pb, 0x0b); /* Length of Application Block */
        avio_write(pb, "NETSCAPE2.0", sizeof("NETSCAPE2.0") - 1);
        avio_w8(pb, 0x03); /* Length of Data Sub-Block */
        avio_w8(pb, 0x01);
        avio_wl16(pb, (uint16_t)loop_count);
        avio_w8(pb, 0x00); /* Data Sub-block Terminator */
    }

    return 0;
}
static int read_ts(const char *s, int64_t *start, int *duration)
{
    int64_t end;
    int hh1, mm1, ss1, ms1;
    int hh2, mm2, ss2, ms2;

    if (sscanf(s, "%u:%u:%u.%u,%u:%u:%u.%u",
               &hh1, &mm1, &ss1, &ms1, &hh2, &mm2, &ss2, &ms2) == 8) {
        end    = (hh2*LLN(3600) + mm2*LLN(60) + ss2) * LLN(100) + ms2;
        *start = (hh1*LLN(3600) + mm1*LLN(60) + ss1) * LLN(100) + ms1;
        *duration = end - *start;
        return 0;
    }
    return -1;
}
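/*
 * Illustrative helper, not part of the original demuxer: the timestamps parsed
 * above appear to be stored in hundredths of a second, so "00:01:30.25" maps to
 * (0*3600 + 1*60 + 30) * 100 + 25 = 9025. The function name is hypothetical.
 */
static int64_t ts_to_hundredths(int hh, int mm, int ss, int cc)
{
    return (hh * 3600LL + mm * 60LL + ss) * 100LL + cc;
}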
/**
 * PCX run-length encoder
 * @param dst output buffer
 * @param dst_size size of output buffer
 * @param src input buffer
 * @param src_plane_size size of one plane of input buffer in bytes
 * @param nplanes number of planes in input buffer
 * @return number of bytes written to dst or -1 on error
 * @bug will not work for nplanes != 1 && bpp != 8
 */
static int pcx_rle_encode(uint8_t *dst, int dst_size,
                          const uint8_t *src, int src_plane_size, int nplanes)
{
    int p;
    const uint8_t *dst_start = dst;

    // check worst-case upper bound on dst_size
    if (dst_size < LLN(2) * src_plane_size * nplanes || src_plane_size <= 0)
        return -1;

    for (p = 0; p < nplanes; p++) {
        int count = 1;
        const uint8_t *src_plane = src + p;
        const uint8_t *src_plane_end = src_plane + src_plane_size * nplanes;
        uint8_t prev = *src_plane;
        src_plane += nplanes;

        for (; ; src_plane += nplanes) {
            if (src_plane < src_plane_end && *src_plane == prev && count < 0x3F) {
                // current byte is same as prev
                ++count;
            } else {
                // output prev * count
                if (count != 1 || prev >= 0xC0)
                    *dst++ = 0xC0 | count;
                *dst++ = prev;

                if (src_plane == src_plane_end)
                    break;

                // start new run
                count = 1;
                prev = *src_plane;
            }
        }
    }

    return dst - dst_start;
}
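/*
 * Minimal, hypothetical caller sketch (not part of the encoder): it assumes it
 * lives in the same translation unit as pcx_rle_encode() and illustrates the
 * worst-case bound checked above -- every input byte may expand to a two-byte
 * (marker, value) pair, so the output buffer is sized at 2x the input.
 */
static int pcx_rle_encode_example(void)
{
    uint8_t line[8] = { 7, 7, 7, 7, 0xC1, 3, 3, 9 };  /* one 8bpp scanline, 1 plane */
    uint8_t out[2 * sizeof(line)];                    /* worst-case output size */
    return pcx_rle_encode(out, sizeof(out), line, sizeof(line), 1); /* bytes written or -1 */
}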
static int64_t get_pts(const char **buf, int *duration,
                       int32_t *x1, int32_t *y1, int32_t *x2, int32_t *y2)
{
    int i;

    for (i=0; i<2; i++) {
        int hh1, mm1, ss1, ms1;
        int hh2, mm2, ss2, ms2;
        if (sscanf(*buf, "%d:%2d:%2d%*1[,.]%3d --> %d:%2d:%2d%*1[,.]%3d"
                   "%*[ ]X1:%u X2:%u Y1:%u Y2:%u",
                   &hh1, &mm1, &ss1, &ms1,
                   &hh2, &mm2, &ss2, &ms2,
                   x1, x2, y1, y2) >= 8) {
            int64_t start = (hh1*LLN(3600) + mm1*LLN(60) + ss1) * LLN(1000) + ms1;
            int64_t end   = (hh2*LLN(3600) + mm2*LLN(60) + ss2) * LLN(1000) + ms2;
            *duration = end - start;
            *buf += ff_subtitles_next_line(*buf);
            return start;
        }
        *buf += ff_subtitles_next_line(*buf);
    }
    return AV_NOPTS_VALUE;
}
static int read_header(AVFormatContext *s)
{
    BRSTMDemuxContext *b = s->priv_data;
    int bom, major, minor, codec, chunk;
    int64_t pos, h1offset, toffset;
    uint32_t size, start, asize;
    AVStream *st;
    int ret = AVERROR_EOF;

    st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);
    st->codec->codec_type = AVMEDIA_TYPE_AUDIO;

    avio_skip(s->pb, 4);

    bom = avio_rb16(s->pb);
    if (bom != 0xFEFF && bom != 0xFFFE) {
        av_log(s, AV_LOG_ERROR, "invalid byte order: %X\n", bom);
        return AVERROR_INVALIDDATA;
    }
    if (bom == 0xFFFE) {
        avpriv_request_sample(s, "little endian byte order");
        return AVERROR_PATCHWELCOME;
    }

    major = avio_r8(s->pb);
    minor = avio_r8(s->pb);
    avio_skip(s->pb, 4); // size of file
    size = avio_rb16(s->pb);
    if (size < 14)
        return AVERROR_INVALIDDATA;

    avio_skip(s->pb, size - 14);
    pos = avio_tell(s->pb);
    if (avio_rl32(s->pb) != MKTAG('H','E','A','D'))
        return AVERROR_INVALIDDATA;
    size = avio_rb32(s->pb);
    if (size < 256)
        return AVERROR_INVALIDDATA;
    avio_skip(s->pb, 4); // unknown
    h1offset = avio_rb32(s->pb);
    if (h1offset > size)
        return AVERROR_INVALIDDATA;
    avio_skip(s->pb, 12);
    toffset = avio_rb32(s->pb) + LLN(16);
    if (toffset > size)
        return AVERROR_INVALIDDATA;

    avio_skip(s->pb, pos + h1offset + 8 - avio_tell(s->pb));
    codec = avio_r8(s->pb);

    switch (codec) {
    case 0: codec = AV_CODEC_ID_PCM_S8_PLANAR;    break;
    case 1: codec = AV_CODEC_ID_PCM_S16BE_PLANAR; break;
    case 2: codec = AV_CODEC_ID_ADPCM_THP;        break;
    default:
        avpriv_request_sample(s, "codec %d", codec);
        return AVERROR_PATCHWELCOME;
    }

    avio_skip(s->pb, 1); // loop flag
    st->codec->codec_id = codec;
    st->codec->channels = avio_r8(s->pb);
    if (!st->codec->channels)
        return AVERROR_INVALIDDATA;

    avio_skip(s->pb, 1); // padding
    st->codec->sample_rate = avio_rb16(s->pb);
    if (!st->codec->sample_rate)
        return AVERROR_INVALIDDATA;

    avio_skip(s->pb, 2); // padding
    avio_skip(s->pb, 4); // loop start sample
    st->start_time = 0;
    st->duration = avio_rb32(s->pb);
    avpriv_set_pts_info(st, 64, 1, st->codec->sample_rate);

    start = avio_rb32(s->pb);
    b->current_block = 0;
    b->block_count = avio_rb32(s->pb);
    if (b->block_count > UINT16_MAX) {
        av_log(s, AV_LOG_WARNING, "too many blocks: %u\n", b->block_count);
        return AVERROR_INVALIDDATA;
    }

    b->block_size = avio_rb32(s->pb);
    if (b->block_size > UINT16_MAX / st->codec->channels)
        return AVERROR_INVALIDDATA;
    b->block_size *= st->codec->channels;

    b->samples_per_block = avio_rb32(s->pb);
    b->last_block_used_bytes = avio_rb32(s->pb);
    if (b->last_block_used_bytes > UINT16_MAX / st->codec->channels)
        return AVERROR_INVALIDDATA;
    b->last_block_used_bytes *= st->codec->channels;

    avio_skip(s->pb, 4); // last block samples
    avio_skip(s->pb, 4); // last block size

    if (codec == AV_CODEC_ID_ADPCM_THP) {
        int ch;

        avio_skip(s->pb, pos + toffset - avio_tell(s->pb));
        toffset = avio_rb32(s->pb) + LLN(16);
        if (toffset > size)
            return AVERROR_INVALIDDATA;

        avio_skip(s->pb, pos + toffset - avio_tell(s->pb));
        b->table = av_mallocz(32 * st->codec->channels);
        if (!b->table)
            return AVERROR(ENOMEM);

        for (ch = 0; ch < st->codec->channels; ch++) {
            if (avio_read(s->pb, b->table + ch * 32, 32) != 32) {
                ret = AVERROR_INVALIDDATA;
                goto fail;
            }
            avio_skip(s->pb, 24);
        }
    }

    if (size < (avio_tell(s->pb) - pos)) {
        ret = AVERROR_INVALIDDATA;
        goto fail;
    }
    avio_skip(s->pb, size - (avio_tell(s->pb) - pos));

    while (!avio_feof(s->pb)) {
        chunk = avio_rl32(s->pb);
        size  = avio_rb32(s->pb);
        if (size < 8) {
            ret = AVERROR_INVALIDDATA;
            goto fail;
        }
        size -= 8;
        switch (chunk) {
        case MKTAG('A','D','P','C'):
            if (codec != AV_CODEC_ID_ADPCM_THP)
                goto skip;

            asize = b->block_count * st->codec->channels * 4;
            if (size < asize) {
                ret = AVERROR_INVALIDDATA;
                goto fail;
            }
            if (b->adpc) {
                av_log(s, AV_LOG_WARNING, "skipping additional ADPC chunk\n");
                goto skip;
            } else {
                b->adpc = av_mallocz(asize);
                if (!b->adpc) {
                    ret = AVERROR(ENOMEM);
                    goto fail;
                }
                avio_read(s->pb, b->adpc, asize);
                avio_skip(s->pb, size - asize);
            }
            break;
        case MKTAG('D','A','T','A'):
            if ((start < avio_tell(s->pb)) ||
                (!b->adpc && codec == AV_CODEC_ID_ADPCM_THP)) {
                ret = AVERROR_INVALIDDATA;
                goto fail;
            }
            avio_skip(s->pb, start - avio_tell(s->pb));

            if (major != 1 || minor)
                avpriv_request_sample(s, "Version %d.%d", major, minor);

            return 0;
        default:
            av_log(s, AV_LOG_WARNING, "skipping unknown chunk: %X\n", chunk);
        skip:
            avio_skip(s->pb, size);
        }
    }

fail:
    read_close(s);

    return ret;
}
static int bmp_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                            const AVFrame *pict, int *got_packet)
{
    const AVFrame * const p = pict;
    int n_bytes_image, n_bytes_per_row, n_bytes, i, n, hsize, ret;
    const uint32_t *pal = NULL;
    uint32_t palette256[256];
    int pad_bytes_per_row, pal_entries = 0, compression = BMP_RGB;
    int bit_count = avctx->bits_per_coded_sample;
    uint8_t *ptr, *buf;

    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
    avctx->coded_frame->key_frame = 1;
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB444:
        compression = BMP_BITFIELDS;
        pal = rgb444_masks; // abuse pal to hold color masks
        pal_entries = 3;
        break;
    case AV_PIX_FMT_RGB565:
        compression = BMP_BITFIELDS;
        pal = rgb565_masks; // abuse pal to hold color masks
        pal_entries = 3;
        break;
    case AV_PIX_FMT_RGB8:
    case AV_PIX_FMT_BGR8:
    case AV_PIX_FMT_RGB4_BYTE:
    case AV_PIX_FMT_BGR4_BYTE:
    case AV_PIX_FMT_GRAY8:
        av_assert1(bit_count == 8);
        avpriv_set_systematic_pal2(palette256, avctx->pix_fmt);
        pal = palette256;
        break;
    case AV_PIX_FMT_PAL8:
        pal = (uint32_t *)p->data[1];
        break;
    case AV_PIX_FMT_MONOBLACK:
        pal = monoblack_pal;
        break;
    }
    if (pal && !pal_entries) pal_entries = 1 << bit_count;
    n_bytes_per_row = ((int64_t)avctx->width * (int64_t)bit_count + LLN(7)) >> LLN(3);
    pad_bytes_per_row = (4 - n_bytes_per_row) & 3;
    n_bytes_image = avctx->height * (n_bytes_per_row + pad_bytes_per_row);

    // STRUCTURE.field refer to the MSVC documentation for BITMAPFILEHEADER
    // and related pages.
#define SIZE_BITMAPFILEHEADER 14
#define SIZE_BITMAPINFOHEADER 40
    hsize = SIZE_BITMAPFILEHEADER + SIZE_BITMAPINFOHEADER + (pal_entries << 2);
    n_bytes = n_bytes_image + hsize;
    if ((ret = ff_alloc_packet2(avctx, pkt, n_bytes)) < 0)
        return ret;
    buf = pkt->data;
    bytestream_put_byte(&buf, 'B');                   // BITMAPFILEHEADER.bfType
    bytestream_put_byte(&buf, 'M');                   // do.
    bytestream_put_le32(&buf, n_bytes);               // BITMAPFILEHEADER.bfSize
    bytestream_put_le16(&buf, 0);                     // BITMAPFILEHEADER.bfReserved1
    bytestream_put_le16(&buf, 0);                     // BITMAPFILEHEADER.bfReserved2
    bytestream_put_le32(&buf, hsize);                 // BITMAPFILEHEADER.bfOffBits
    bytestream_put_le32(&buf, SIZE_BITMAPINFOHEADER); // BITMAPINFOHEADER.biSize
    bytestream_put_le32(&buf, avctx->width);          // BITMAPINFOHEADER.biWidth
    bytestream_put_le32(&buf, avctx->height);         // BITMAPINFOHEADER.biHeight
    bytestream_put_le16(&buf, 1);                     // BITMAPINFOHEADER.biPlanes
    bytestream_put_le16(&buf, bit_count);             // BITMAPINFOHEADER.biBitCount
    bytestream_put_le32(&buf, compression);           // BITMAPINFOHEADER.biCompression
    bytestream_put_le32(&buf, n_bytes_image);         // BITMAPINFOHEADER.biSizeImage
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biXPelsPerMeter
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biYPelsPerMeter
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biClrUsed
    bytestream_put_le32(&buf, 0);                     // BITMAPINFOHEADER.biClrImportant
    for (i = 0; i < pal_entries; i++)
        bytestream_put_le32(&buf, pal[i] & 0xFFFFFF);
    // BMP files are bottom-to-top so we start from the end...
    ptr = p->data[0] + (avctx->height - 1) * p->linesize[0];
    buf = pkt->data + hsize;
    for(i = 0; i < avctx->height; i++) {
        if (bit_count == 16) {
            const uint16_t *src = (const uint16_t *) ptr;
            uint16_t *dst = (uint16_t *) buf;
            for(n = 0; n < avctx->width; n++)
                AV_WL16(dst + n, src[n]);
        } else {
            memcpy(buf, ptr, n_bytes_per_row);
        }
        buf += n_bytes_per_row;
        memset(buf, 0, pad_bytes_per_row);
        buf += pad_bytes_per_row;
        ptr -= p->linesize[0]; // ... and go back
    }

    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;
    return 0;
}
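/*
 * Worked example of the row-size arithmetic above (illustrative, not part of
 * the encoder): BMP rows are padded up to a multiple of 4 bytes, and the
 * "(4 - n) & 3" trick computes that padding without a division.
 */
static int bmp_row_stride_example(void)
{
    int width = 5, bit_count = 24;
    int bytes_per_row = (width * bit_count + 7) >> 3; /* 5 * 24 bits -> 15 bytes */
    int pad           = (4 - bytes_per_row) & 3;      /* 15 -> 1 byte of padding */
    return bytes_per_row + pad;                       /* 16, a multiple of 4 */
}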
static int dirac_combine_frame(AVCodecParserContext *s, AVCodecContext *avctx,
                               int next, const uint8_t **buf, int *buf_size)
{
    int parse_timing_info = (s->pts == AV_NOPTS_VALUE &&
                             s->dts == AV_NOPTS_VALUE);
    DiracParseContext *pc = s->priv_data;

    if (pc->overread_index) {
        memcpy(pc->buffer, pc->buffer + pc->overread_index,
               pc->index - pc->overread_index);
        pc->index -= pc->overread_index;
        pc->overread_index = 0;
        if (*buf_size == 0 && pc->buffer[4] == 0x10) {
            *buf      = pc->buffer;
            *buf_size = pc->index;
            return 0;
        }
    }

    if (next == -1) {
        /* Found a possible frame start but not a frame end */
        void *new_buffer = av_fast_realloc(pc->buffer, &pc->buffer_size,
                                           pc->index + (*buf_size - pc->sync_offset));
        if (!new_buffer) /* guard against a failed reallocation */
            return AVERROR(ENOMEM);
        pc->buffer = new_buffer;
        memcpy(pc->buffer + pc->index, (*buf + pc->sync_offset),
               *buf_size - pc->sync_offset);
        pc->index += *buf_size - pc->sync_offset;
        return -1;
    } else {
        /* Found a possible frame start and a possible frame end */
        DiracParseUnit pu1, pu;
        void *new_buffer = av_fast_realloc(pc->buffer, &pc->buffer_size,
                                           pc->index + next);
        if (!new_buffer) /* guard against a failed reallocation */
            return AVERROR(ENOMEM);
        pc->buffer = new_buffer;
        memcpy(pc->buffer + pc->index, *buf, next);
        pc->index += next;

        /* Need to check if we have a valid Parse Unit. We can't go by the
         * sync pattern 'BBCD' alone because arithmetic coding of the residual
         * and motion data can cause the pattern triggering a false start of
         * frame. So check if the previous parse offset of the next parse unit
         * is equal to the next parse offset of the current parse unit then
         * we can be pretty sure that we have a valid parse unit */
        if (!unpack_parse_unit(&pu1, pc, pc->index - 13)                     ||
            !unpack_parse_unit(&pu, pc, pc->index - 13 - pu1.prev_pu_offset) ||
            pu.next_pu_offset != pu1.prev_pu_offset                          ||
            pc->index < pc->dirac_unit_size + LLN(13) + pu1.prev_pu_offset) {
            pc->index              -= 9;
            *buf_size               = next - 9;
            pc->header_bytes_needed = 9;
            return -1;
        }

        /* All non-frame data must be accompanied by frame data. This is to
         * ensure that pts is set correctly. So if the current parse unit is
         * not frame data, wait for frame data to come along */

        pc->dirac_unit = pc->buffer + pc->index - 13 -
                         pu1.prev_pu_offset - pc->dirac_unit_size;

        pc->dirac_unit_size += pu.next_pu_offset;

        if ((pu.pu_type & 0x08) != 0x08) {
            pc->header_bytes_needed = 9;
            *buf_size               = next;
            return -1;
        }

        /* Get the picture number to set the pts and dts*/
        if (parse_timing_info) {
            uint8_t *cur_pu = pc->buffer +
                              pc->index - 13 - pu1.prev_pu_offset;
            int pts = AV_RB32(cur_pu + 13);
            if (s->last_pts == 0 && s->last_dts == 0)
                s->dts = pts - 1;
            else
                s->dts = s->last_dts + 1;
            s->pts = pts;
            if (!avctx->has_b_frames && (cur_pu[4] & 0x03))
                avctx->has_b_frames = 1;
        }
        if (avctx->has_b_frames && s->pts == s->dts)
            s->pict_type = AV_PICTURE_TYPE_B;

        /* Finally have a complete Dirac data unit */
        *buf      = pc->dirac_unit;
        *buf_size = pc->dirac_unit_size;

        pc->dirac_unit_size     = 0;
        pc->overread_index      = pc->index - 13;
        pc->header_bytes_needed = 9;
    }
    return next;
}
static int rm_read_audio_stream_info(AVFormatContext *s, AVIOContext *pb,
                                     AVStream *st, RMStream *ast,
                                     int read_all)
{
    char buf[256];
    uint32_t version;
    int ret;

    /* ra type header */
    version = avio_rb16(pb); /* version */
    if (version == 3) {
        unsigned bytes_per_minute;
        int header_size = avio_rb16(pb);
        int64_t startpos = avio_tell(pb);
        avio_skip(pb, 8);
        bytes_per_minute = avio_rb16(pb);
        avio_skip(pb, 4);
        rm_read_metadata(s, pb, 0);
        if ((startpos + header_size) >= avio_tell(pb) + 2) {
            // fourcc (should always be "lpcJ")
            avio_r8(pb);
            get_str8(pb, buf, sizeof(buf));
        }
        // Skip extra header crap (this should never happen)
        if ((startpos + header_size) > avio_tell(pb))
            avio_skip(pb, header_size + startpos - avio_tell(pb));
        if (bytes_per_minute)
            st->codec->bit_rate = LLN(8) * bytes_per_minute / 60;
        st->codec->sample_rate = 8000;
        st->codec->channels = 1;
        st->codec->channel_layout = AV_CH_LAYOUT_MONO;
        st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
        st->codec->codec_id = AV_CODEC_ID_RA_144;
        ast->deint_id = DEINT_ID_INT0;
    } else {
        int flavor, sub_packet_h, coded_framesize, sub_packet_size;
        int codecdata_length;
        unsigned bytes_per_minute;
        /* old version (4) */
        avio_skip(pb, 2); /* unused */
        avio_rb32(pb); /* .ra4 */
        avio_rb32(pb); /* data size */
        avio_rb16(pb); /* version2 */
        avio_rb32(pb); /* header size */
        flavor= avio_rb16(pb); /* add codec info / flavor */
        ast->coded_framesize = coded_framesize = avio_rb32(pb); /* coded frame size */
        avio_rb32(pb); /* ??? */
        bytes_per_minute = avio_rb32(pb);
        if (version == 4) {
            if (bytes_per_minute)
                st->codec->bit_rate = LLN(8) * bytes_per_minute / 60;
        }
        avio_rb32(pb); /* ??? */
        ast->sub_packet_h = sub_packet_h = avio_rb16(pb); /* 1 */
        st->codec->block_align= avio_rb16(pb); /* frame size */
        ast->sub_packet_size = sub_packet_size = avio_rb16(pb); /* sub packet size */
        avio_rb16(pb); /* ??? */
        if (version == 5) {
            avio_rb16(pb); avio_rb16(pb); avio_rb16(pb);
        }
        st->codec->sample_rate = avio_rb16(pb);
        avio_rb32(pb);
        st->codec->channels = avio_rb16(pb);
        if (version == 5) {
            ast->deint_id = avio_rl32(pb);
            avio_read(pb, buf, 4);
            buf[4] = 0;
        } else {
            AV_WL32(buf, 0);
            get_str8(pb, buf, sizeof(buf)); /* desc */
            ast->deint_id = AV_RL32(buf);
            get_str8(pb, buf, sizeof(buf)); /* desc */
        }
        st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
        st->codec->codec_tag  = AV_RL32(buf);
        st->codec->codec_id  = ff_codec_get_id(ff_rm_codec_tags,
                                               st->codec->codec_tag);

        switch (st->codec->codec_id) {
        case AV_CODEC_ID_AC3:
            st->need_parsing = AVSTREAM_PARSE_FULL;
            break;
        case AV_CODEC_ID_RA_288:
            st->codec->extradata_size= 0;
            ast->audio_framesize = st->codec->block_align;
            st->codec->block_align = coded_framesize;
            break;
        case AV_CODEC_ID_COOK:
            st->need_parsing = AVSTREAM_PARSE_HEADERS;
            /* fall through to the common extradata handling below */
        case AV_CODEC_ID_ATRAC3:
        case AV_CODEC_ID_SIPR:
            if (read_all) {
                codecdata_length = 0;
            } else {
                avio_rb16(pb); avio_r8(pb);
                if (version == 5)
                    avio_r8(pb);
                codecdata_length = avio_rb32(pb);
                if(codecdata_length + FF_INPUT_BUFFER_PADDING_SIZE <= (unsigned)codecdata_length){
                    av_log(s, AV_LOG_ERROR, "codecdata_length too large\n");
                    return -1;
                }
            }

            ast->audio_framesize = st->codec->block_align;
            if (st->codec->codec_id == AV_CODEC_ID_SIPR) {
                if (flavor > 3) {
                    av_log(s, AV_LOG_ERROR, "bad SIPR file flavor %d\n",
                           flavor);
                    return -1;
                }
                st->codec->block_align = ff_sipr_subpk_size[flavor];
            } else {
                if(sub_packet_size <= 0){
                    av_log(s, AV_LOG_ERROR, "sub_packet_size is invalid\n");
                    return -1;
                }
                st->codec->block_align = ast->sub_packet_size;
            }
            if ((ret = rm_read_extradata(pb, st->codec, codecdata_length)) < 0)
                return ret;

            break;
        case AV_CODEC_ID_AAC:
            avio_rb16(pb); avio_r8(pb);
            if (version == 5)
                avio_r8(pb);
            codecdata_length = avio_rb32(pb);
            if(codecdata_length + FF_INPUT_BUFFER_PADDING_SIZE <= (unsigned)codecdata_length){
                av_log(s, AV_LOG_ERROR, "codecdata_length too large\n");
                return -1;
            }
            if (codecdata_length >= 1) {
                avio_r8(pb);
                if ((ret = rm_read_extradata(pb, st->codec, codecdata_length - 1)) < 0)
                    return ret;
            }
            break;
        }
        switch (ast->deint_id) {
        case DEINT_ID_INT4:
            if (ast->coded_framesize > ast->audio_framesize ||
                sub_packet_h <= 1 ||
                ast->coded_framesize * sub_packet_h > (2 + (sub_packet_h & 1)) * ast->audio_framesize)
                return AVERROR_INVALIDDATA;
            if (ast->coded_framesize * sub_packet_h != 2*ast->audio_framesize) {
                avpriv_request_sample(s, "mismatching interleaver parameters");
                return AVERROR_INVALIDDATA;
            }
            break;
        case DEINT_ID_GENR:
            if (ast->sub_packet_size <= 0 ||
                ast->sub_packet_size > ast->audio_framesize)
                return AVERROR_INVALIDDATA;
            if (ast->audio_framesize % ast->sub_packet_size)
                return AVERROR_INVALIDDATA;
            break;
        case DEINT_ID_SIPR:
        case DEINT_ID_INT0:
        case DEINT_ID_VBRS:
        case DEINT_ID_VBRF:
            break;
        default:
            av_log(s, AV_LOG_ERROR ,"Unknown interleaver %"PRIX32"\n", ast->deint_id);
            return AVERROR_INVALIDDATA;
        }
        if (ast->deint_id == DEINT_ID_INT4 ||
            ast->deint_id == DEINT_ID_GENR ||
            ast->deint_id == DEINT_ID_SIPR) {
            if (st->codec->block_align <= 0 ||
                ast->audio_framesize * sub_packet_h > (unsigned)INT_MAX ||
                ast->audio_framesize * sub_packet_h < st->codec->block_align)
                return AVERROR_INVALIDDATA;
            if (av_new_packet(&ast->pkt, ast->audio_framesize * sub_packet_h) < 0)
                return AVERROR(ENOMEM);
        }

        if (read_all) {
            avio_r8(pb);
            avio_r8(pb);
            avio_r8(pb);
            rm_read_metadata(s, pb, 0);
        }
    }
    return 0;
}