/* Write an MPEG-4 descriptor header: the tag byte followed by the size
 * encoded as four 7-bit groups, the first three with the continuation
 * bit (0x80) set, the last one bare. */
static void put_descr(PutByteContext *pb, int tag, unsigned int size)
{
    bytestream2_put_byte(pb, tag);
    /* Size is always emitted in the 4-byte expanded form. */
    bytestream2_put_byte(pb, (size >> 21) | 0x80);
    bytestream2_put_byte(pb, (size >> 14) | 0x80);
    bytestream2_put_byte(pb, (size >>  7) | 0x80);
    bytestream2_put_byte(pb, size & 0x7F);
}
/* Append one Huffman table to a DHT segment: the class/id byte, the 16
 * code-length counts, then the symbol values. Returns the number of
 * bytes written (17 + number of values). */
static int jpeg_create_huffman_table(PutByteContext *p, int table_class,
                                     int table_id,
                                     const uint8_t *bits_table,
                                     const uint8_t *value_table)
{
    int len, idx;
    int nb_values = 0;

    bytestream2_put_byte(p, (table_class << 4) | table_id);

    /* bits_table is 1-based: entry i holds the count of codes of length i. */
    for (len = 1; len <= 16; len++) {
        nb_values += bits_table[len];
        bytestream2_put_byte(p, bits_table[len]);
    }

    for (idx = 0; idx < nb_values; idx++)
        bytestream2_put_byte(p, value_table[idx]);

    return nb_values + 17;
}
/* Build the "magic cookie" AudioToolbox needs to configure the decoder.
 *
 * For AAC this synthesizes an MPEG-4 ESDS (ES descriptor) wrapping the codec
 * extradata as the DecoderSpecificInfo; the buffer is freshly av_malloc()ed
 * and owned by the caller.  For every other codec the context's extradata is
 * returned directly and is NOT owned by the caller — note the asymmetry when
 * freeing.  Returns NULL on allocation failure; *cookie_size is set to the
 * cookie length in bytes. */
static uint8_t* ffat_get_magic_cookie(AVCodecContext *avctx, UInt32 *cookie_size)
{
    ATDecodeContext *at = avctx->priv_data;
    if (avctx->codec_id == AV_CODEC_ID_AAC) {
        char *extradata;
        PutByteContext pb;
        /* 5 bytes per descriptor header (tag + 4-byte size), plus the
         * 3-byte ES payload, 13-byte DecoderConfig payload, and the raw
         * extradata inside the DecoderSpecificInfo descriptor. */
        *cookie_size = 5 + 3 + 5+13 + 5+at->extradata_size;
        if (!(extradata = av_malloc(*cookie_size)))
            return NULL;
        bytestream2_init_writer(&pb, extradata, *cookie_size);

        // ES descriptor
        put_descr(&pb, 0x03, 3 + 5+13 + 5+at->extradata_size);
        bytestream2_put_be16(&pb, 0); // ES_ID
        bytestream2_put_byte(&pb, 0x00); // flags (= no flags)

        // DecoderConfig descriptor
        put_descr(&pb, 0x04, 13 + 5+at->extradata_size);

        // Object type indication (0x40 = MPEG-4 Audio)
        bytestream2_put_byte(&pb, 0x40);

        bytestream2_put_byte(&pb, 0x15); // flags (= Audiostream)

        bytestream2_put_be24(&pb, 0); // Buffersize DB

        bytestream2_put_be32(&pb, 0); // maxbitrate
        bytestream2_put_be32(&pb, 0); // avgbitrate

        // DecoderSpecific info descriptor
        put_descr(&pb, 0x05, at->extradata_size);
        bytestream2_put_buffer(&pb, at->extradata, at->extradata_size);
        return extradata;
    } else {
        /* Non-AAC: hand back the extradata as-is; caller must not free it. */
        *cookie_size = at->extradata_size;
        return at->extradata;
    }
}
/* Write a Hap section header; section_length excludes the header itself.
 *
 * Layout: a 24-bit little-endian length (zero when the long, eight-byte
 * form is used), one section-type byte, and — for the long form only — the
 * real length as a trailing 32-bit little-endian word. */
static void hap_write_section_header(PutByteContext *pbc,
                                     enum HapHeaderLength header_length,
                                     int section_length,
                                     enum HapSectionType section_type)
{
    int is_long = header_length == HAP_HDR_LONG;

    bytestream2_put_le24(pbc, is_long ? 0 : section_length);
    bytestream2_put_byte(pbc, section_type);

    if (is_long)
        bytestream2_put_le32(pbc, section_length);
}
/* There are (invalid) samples in the wild with mp4-style extradata, where the
 * parameter sets are stored unescaped (i.e. as RBSP).
 * This function catches the parameter set decoding failure and tries again
 * after escaping it */
static int decode_extradata_ps_mp4(const uint8_t *buf, int buf_size, H264ParamSets *ps,
                                   int err_recognition, void *logctx)
{
    int ret;

    ret = decode_extradata_ps(buf, buf_size, ps, 1, logctx);
    if (ret < 0 && !(err_recognition & AV_EF_EXPLODE)) {
        GetByteContext gbc;
        PutByteContext pbc;
        uint8_t *escaped_buf;
        int escaped_buf_size;

        av_log(logctx, AV_LOG_WARNING,
               "SPS decoding failure, trying again after escaping the NAL\n");

        /* Escaping expands at most every 2 bytes into 3; bound against
         * INT16_MAX so the escaped size still fits the 16-bit length
         * prefix rewritten below with AV_WB16. */
        if (buf_size / 2 >= (INT16_MAX - AV_INPUT_BUFFER_PADDING_SIZE) / 3)
            return AVERROR(ERANGE);
        escaped_buf_size = buf_size * 3 / 2 + AV_INPUT_BUFFER_PADDING_SIZE;
        escaped_buf = av_mallocz(escaped_buf_size);
        if (!escaped_buf)
            return AVERROR(ENOMEM);

        bytestream2_init(&gbc, buf, buf_size);
        bytestream2_init_writer(&pbc, escaped_buf, escaped_buf_size);

        while (bytestream2_get_bytes_left(&gbc)) {
            /* Two zero bytes followed by a byte <= 3 need an emulation
             * prevention byte: emit 00 00 03 and consume only the two
             * zeros; the third byte is re-examined next iteration. */
            if (bytestream2_get_bytes_left(&gbc) >= 3 &&
                bytestream2_peek_be24(&gbc) <= 3) {
                bytestream2_put_be24(&pbc, 3);
                bytestream2_skip(&gbc, 2);
            } else
                bytestream2_put_byte(&pbc, bytestream2_get_byte(&gbc));
        }

        escaped_buf_size = bytestream2_tell_p(&pbc);
        /* Patch the mp4-style 16-bit size prefix to the escaped length. */
        AV_WB16(escaped_buf, escaped_buf_size - 2);

        ret = decode_extradata_ps(escaped_buf, escaped_buf_size, ps, 1, logctx);
        av_freep(&escaped_buf);
        if (ret < 0)
            return ret;
    }

    return 0;
}
/* This function creates an image description extension that some codecs need
 * to be able to decode properly, a copy of the strf (BITMAPINFOHEADER) chunk
 * in the avi.
 * Return value: a handle to an image description extension which has to be
 * DisposeHandle()'d, or NULL if allocation failed. */
Handle create_strf_ext(AVCodecContext *codec)
{
    Handle result = NULL;
    PutByteContext p;
    int size;

    /* initialize the extension
     * 40 bytes for the BITMAPINFOHEADER structure, see avienc.c in the ffmpeg
     * project; extradata_size for the data still stored in the AVCodecContext
     * structure */
    size = 40 + codec->extradata_size;
    result = NewHandle(size);
    if (result == NULL)
        goto bail;
    bytestream2_init_writer(&p, (uint8_t*)*result, size);

    /* construct the BITMAPINFOHEADER structure */
    /* QT Atoms are big endian, but the STRF atom should be little endian.
     * NOTE(review): the size field below is written big endian despite that
     * comment — looks intentional for the QT wrapper, but confirm. */
    bytestream2_put_be32(&p, size);                 /* total size incl. extradata */
    bytestream2_put_le32(&p, codec->width);         /* biWidth */
    /* BUGFIX: biHeight was mistakenly written as codec->width twice. */
    bytestream2_put_le32(&p, codec->height);        /* biHeight */
    bytestream2_put_le16(&p, 1);                    /* biPlanes, must be 1 */
    bytestream2_put_le16(&p, codec->bits_per_coded_sample ?: 24); /* biBitCount */
    /* compression type */
    bytestream2_put_le32(&p, codec->codec_tag);     /* biCompression (fourcc) */
    bytestream2_put_le32(&p, codec->width * codec->height * 3); /* biSizeImage */
    bytestream2_put_le32(&p, 0);                    /* biXPelsPerMeter */
    bytestream2_put_le32(&p, 0);                    /* biYPelsPerMeter */
    bytestream2_put_le32(&p, 0);                    /* biClrUsed */
    bytestream2_put_le32(&p, 0);                    /* biClrImportant */

    /* now the remaining stuff */
    bytestream2_put_buffer(&p, codec->extradata, codec->extradata_size);

    /* NOTE(review): this even-size pad byte lands past the allocated size
     * (40 + extradata_size), so the bounds-checked writer silently drops it.
     * If padding is really required, the allocation should include it —
     * TODO confirm intended chunk-size semantics before changing. */
    if (codec->extradata_size & 1) {
        bytestream2_put_byte(&p, 0);
    }

bail:
    return result;
} /* create_extension() */
/* Fill the JPEG frame/scan headers, quantisation and Huffman tables, and the
 * VAAPI picture parameter buffer for one (intra-only) picture.
 * Returns 0 on success. */
static int vaapi_encode_mjpeg_init_picture_params(AVCodecContext *avctx,
                                                  VAAPIEncodePicture *pic)
{
    VAAPIEncodeMJPEGContext       *priv = avctx->priv_data;
    JPEGRawFrameHeader              *fh = &priv->frame_header;
    JPEGRawScanHeader               *sh = &priv->scan.header;
    VAEncPictureParameterBufferJPEG *vpic = pic->codec_picture_params;
    const AVPixFmtDescriptor *desc;
    const uint8_t *components;
    int t, i, quant_scale, len;

    /* MJPEG encoding is intra-only: every picture must be an IDR. */
    av_assert0(pic->type == PICTURE_TYPE_IDR);

    desc = av_pix_fmt_desc_get(priv->common.input_frames->sw_format);
    av_assert0(desc);
    /* Component identifiers: 'R'/'G'/'B' for RGB formats, 1/2/3 for YUV. */
    if (desc->flags & AV_PIX_FMT_FLAG_RGB)
        components = (uint8_t[3]) { 'R', 'G', 'B' };
    else
        components = (uint8_t[3]) { 1, 2, 3 };

    // Frame header.
    fh->P  = 8;              // sample precision (bits)
    fh->Y  = avctx->height;
    fh->X  = avctx->width;
    fh->Nf = desc->nb_components;
    for (i = 0; i < fh->Nf; i++) {
        fh->C[i] = components[i];
        /* Component 0 (luma) gets the full sampling factors derived from the
         * chroma subsampling; the others are always 1x1. */
        fh->H[i] = 1 + (i == 0 ? desc->log2_chroma_w : 0);
        fh->V[i] = 1 + (i == 0 ? desc->log2_chroma_h : 0);
        /* Quantisation table 0 for luma, table 1 for chroma components. */
        fh->Tq[i] = !!i;
    }
    fh->Lf = 8 + 3 * fh->Nf; // SOF segment length

    // JFIF header.
    if (priv->jfif) {
        JPEGRawApplicationData *app = &priv->jfif_header;
        AVRational sar = pic->input_image->sample_aspect_ratio;
        int sar_w, sar_h;
        PutByteContext pbc;

        bytestream2_init_writer(&pbc, priv->jfif_data,
                                sizeof(priv->jfif_data));

        /* Identifier "JFIF\0" (5 bytes including the terminator). */
        bytestream2_put_buffer(&pbc, "JFIF", 5);
        bytestream2_put_be16(&pbc, 0x0102); // JFIF version 1.02
        bytestream2_put_byte(&pbc, 0);      // density units: aspect ratio only

        /* Pixel aspect ratio, clamped into the 16-bit JFIF fields; fall back
         * to 1:1 when the SAR is unset/invalid. */
        av_reduce(&sar_w, &sar_h, sar.num, sar.den, 65535);
        if (sar_w && sar_h) {
            bytestream2_put_be16(&pbc, sar_w);
            bytestream2_put_be16(&pbc, sar_h);
        } else {
            bytestream2_put_be16(&pbc, 1);
            bytestream2_put_be16(&pbc, 1);
        }

        /* No embedded thumbnail. */
        bytestream2_put_byte(&pbc, 0);
        bytestream2_put_byte(&pbc, 0);

        /* jfif_data must be exactly filled. */
        av_assert0(bytestream2_get_bytes_left_p(&pbc) == 0);

        app->Lp     = 2 + sizeof(priv->jfif_data);
        app->Ap     = priv->jfif_data;
        app->Ap_ref = NULL;
    }

    // Quantisation tables.
    /* Standard IJG quality-to-scale mapping (quality 50 => scale 100). */
    if (priv->quality < 50)
        quant_scale = 5000 / priv->quality;
    else
        quant_scale = 200 - 2 * priv->quality;

    len = 2;
    /* One table for greyscale, two (luma + chroma) otherwise. */
    for (t = 0; t < 1 + (fh->Nf > 1); t++) {
        JPEGRawQuantisationTable *quant = &priv->quant_tables.table[t];
        const uint8_t *data = t == 0 ? vaapi_encode_mjpeg_quant_luminance
                                     : vaapi_encode_mjpeg_quant_chrominance;

        quant->Pq = 0; // 8-bit table entries
        quant->Tq = t;
        for (i = 0; i < 64; i++)
            quant->Q[i] = av_clip(data[i] * quant_scale / 100, 1, 255);

        len += 65;
    }
    priv->quant_tables.Lq = len; // DQT segment length

    // Huffman tables.
    len = 2;
    /* DC+AC luma tables always; DC+AC chroma tables as well for colour. */
    for (t = 0; t < 2 + 2 * (fh->Nf > 1); t++) {
        JPEGRawHuffmanTable *huff = &priv->huffman_tables.table[t];
        const uint8_t *lengths, *values;
        int k;

        /* The avpriv bits tables are 1-based, hence the +1. */
        switch (t) {
        case 0:
            lengths = avpriv_mjpeg_bits_dc_luminance + 1;
            values  = avpriv_mjpeg_val_dc;
            break;
        case 1:
            lengths = avpriv_mjpeg_bits_ac_luminance + 1;
            values  = avpriv_mjpeg_val_ac_luminance;
            break;
        case 2:
            lengths = avpriv_mjpeg_bits_dc_chrominance + 1;
            values  = avpriv_mjpeg_val_dc;
            break;
        case 3:
            lengths = avpriv_mjpeg_bits_ac_chrominance + 1;
            values  = avpriv_mjpeg_val_ac_chrominance;
            break;
        }

        huff->Tc = t % 2; // class: 0 = DC, 1 = AC
        huff->Th = t / 2; // destination: 0 = luma, 1 = chroma

        /* Copy code-length counts, accumulating the value count k. */
        for (i = k = 0; i < 16; i++)
            k += (huff->L[i] = lengths[i]);
        for (i = 0; i < k; i++)
            huff->V[i] = values[i];

        len += 17 + k;
    }
    priv->huffman_tables.Lh = len; // DHT segment length

    // Scan header.
    sh->Ns = fh->Nf;
    for (i = 0; i < fh->Nf; i++) {
        sh->Cs[i] = fh->C[i];
        /* Huffman table 0 for luma, table 1 for chroma components. */
        sh->Td[i] = i > 0;
        sh->Ta[i] = i > 0;
    }
    /* Baseline sequential: full spectral range, no successive approximation. */
    sh->Ss = 0;
    sh->Se = 63;
    sh->Ah = 0;
    sh->Al = 0;
    sh->Ls = 6 + 2 * sh->Ns; // SOS segment length

    *vpic = (VAEncPictureParameterBufferJPEG) {
        .reconstructed_picture = pic->recon_surface,
        .coded_buf             = pic->output_buffer,

        .picture_width  = fh->X,
        .picture_height = fh->Y,

        .pic_flags.bits = {
            .profile      = 0,
            .progressive  = 0,
            .huffman      = 1,
            .interleaved  = 0,
            .differential = 0,
        },

        .sample_bit_depth = fh->P,
        .num_scan         = 1,
        .num_components   = fh->Nf,

        // The driver modifies the provided quantisation tables according
        // to this quality value; the middle value of 50 makes that the
        // identity so that they are used unchanged.
        .quality = 50,
    };

    for (i = 0; i < fh->Nf; i++) {
        vpic->component_id[i]             = fh->C[i];
        vpic->quantiser_table_selector[i] = fh->Tq[i];
    }

    pic->nb_slices = 1;

    return 0;
}
/* Reconstruct a baseline JPEG header (SOI..SOS) in buf for an RTP/JPEG
 * payload.  w and h are given in 8-pixel blocks; type selects the chroma
 * subsampling (0 => 4:2:2, otherwise 4:2:0).  qtable holds nb_qtable
 * 64-byte zig-zag quantisation tables; dri, if non-zero, becomes a DRI
 * restart-interval segment.  Returns the header length in bytes. */
static int jpeg_create_header(uint8_t *buf, int size, uint32_t type,
                              uint32_t w, uint32_t h, const uint8_t *qtable,
                              int nb_qtable, int dri)
{
    PutByteContext pbc;
    uint8_t *dht_size_ptr;
    int dht_size, i;

    bytestream2_init_writer(&pbc, buf, size);

    /* Convert from blocks to pixels. */
    w <<= 3;
    h <<= 3;

    /* SOI */
    jpeg_put_marker(&pbc, SOI);

    /* JFIF header */
    jpeg_put_marker(&pbc, APP0);
    bytestream2_put_be16(&pbc, 16);          /* segment length */
    bytestream2_put_buffer(&pbc, "JFIF", 5); /* identifier incl. NUL */
    bytestream2_put_be16(&pbc, 0x0201);      /* JFIF version */
    bytestream2_put_byte(&pbc, 0);           /* density units: none */
    bytestream2_put_be16(&pbc, 1);           /* X density */
    bytestream2_put_be16(&pbc, 1);           /* Y density */
    bytestream2_put_byte(&pbc, 0);           /* thumbnail width */
    bytestream2_put_byte(&pbc, 0);           /* thumbnail height */

    if (dri) {
        jpeg_put_marker(&pbc, DRI);
        bytestream2_put_be16(&pbc, 4);       /* segment length */
        bytestream2_put_be16(&pbc, dri);     /* restart interval */
    }

    /* DQT */
    jpeg_put_marker(&pbc, DQT);
    bytestream2_put_be16(&pbc, 2 + nb_qtable * (1 + 64)); /* segment length */
    for (i = 0; i < nb_qtable; i++) {
        bytestream2_put_byte(&pbc, i);       /* precision 0 / table id i */

        /* Each table is an array of 64 values given in zig-zag
         * order, identical to the format used in a JFIF DQT
         * marker segment. */
        bytestream2_put_buffer(&pbc, qtable + 64 * i, 64);
    }

    /* DHT */
    jpeg_put_marker(&pbc, DHT);
    /* Segment length is not known yet; remember its position and
     * back-patch it below once all four tables are written. */
    dht_size_ptr = pbc.buffer;
    bytestream2_put_be16(&pbc, 0);
    dht_size = 2;
    dht_size += jpeg_create_huffman_table(&pbc, 0, 0,avpriv_mjpeg_bits_dc_luminance,
                                          avpriv_mjpeg_val_dc);
    dht_size += jpeg_create_huffman_table(&pbc, 0, 1, avpriv_mjpeg_bits_dc_chrominance,
                                          avpriv_mjpeg_val_dc);
    dht_size += jpeg_create_huffman_table(&pbc, 1, 0, avpriv_mjpeg_bits_ac_luminance,
                                          avpriv_mjpeg_val_ac_luminance);
    dht_size += jpeg_create_huffman_table(&pbc, 1, 1, avpriv_mjpeg_bits_ac_chrominance,
                                          avpriv_mjpeg_val_ac_chrominance);
    AV_WB16(dht_size_ptr, dht_size);

    /* SOF0 */
    jpeg_put_marker(&pbc, SOF0);
    bytestream2_put_be16(&pbc, 17); /* size */
    bytestream2_put_byte(&pbc, 8); /* bits per component */
    bytestream2_put_be16(&pbc, h);
    bytestream2_put_be16(&pbc, w);
    bytestream2_put_byte(&pbc, 3); /* number of components */
    bytestream2_put_byte(&pbc, 1); /* component number */
    /* Luma: 2x1 sampling for 4:2:2 (type 0), 2x2 for 4:2:0. */
    bytestream2_put_byte(&pbc, (2 << 4) | (type ? 2 : 1)); /* hsample/vsample */
    bytestream2_put_byte(&pbc, 0); /* matrix number */
    bytestream2_put_byte(&pbc, 2); /* component number */
    bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
    bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */
    bytestream2_put_byte(&pbc, 3); /* component number */
    bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
    bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */

    /* SOS */
    jpeg_put_marker(&pbc, SOS);
    bytestream2_put_be16(&pbc, 12);  /* segment length */
    bytestream2_put_byte(&pbc, 3);   /* number of components in scan */
    bytestream2_put_byte(&pbc, 1);   /* component 1: DC/AC tables 0/0 */
    bytestream2_put_byte(&pbc, 0);
    bytestream2_put_byte(&pbc, 2);   /* component 2: DC/AC tables 1/1 */
    bytestream2_put_byte(&pbc, 17);
    bytestream2_put_byte(&pbc, 3);   /* component 3: DC/AC tables 1/1 */
    bytestream2_put_byte(&pbc, 17);
    bytestream2_put_byte(&pbc, 0);   /* spectral selection start */
    bytestream2_put_byte(&pbc, 63);  /* spectral selection end */
    bytestream2_put_byte(&pbc, 0);   /* successive approximation */

    /* Return the length in bytes of the JPEG header. */
    return bytestream2_tell_p(&pbc);
}
/* Emit a two-byte JPEG marker: the 0xFF prefix followed by the marker code. */
static void jpeg_put_marker(PutByteContext *pbc, int marker)
{
    bytestream2_put_byte(pbc, 0xff);
    bytestream2_put_byte(pbc, marker);
}