Example #1
0
/* This function creates an image description extension that some codecs need to be able
 * to decode properly, a copy of the strf (BITMAPINFOHEADER) chunk in the avi.
 * Return value: a handle to an image description extension which has to be DisposeHandle()'d,
 * or NULL if the handle could not be allocated */
Handle create_strf_ext(AVCodecContext *codec)
{
    Handle result = NULL;
    PutByteContext p;
    int size, padded_size;

    /* initialize the extension
     * 40 bytes			for the BITMAPINFOHEADER structure, see avienc.c in the ffmpeg project
     * extradata_size	for the data still stored in the AVCodecContext structure */
    size = 40 + codec->extradata_size;
    /* RIFF chunks are word aligned: reserve room for the trailing padding byte,
     * otherwise the bounds-checked writer silently drops it below */
    padded_size = size + (codec->extradata_size & 1);
    result = NewHandle(padded_size);
    if (result == NULL)
        goto bail;

    bytestream2_init_writer(&p, (uint8_t*)*result, padded_size);

    /* construct the BITMAPINFOHEADER structure */
    /* QT Atoms are big endian, but the STRF atom should be little endian */
    bytestream2_put_le32(&p, size);              /* biSize, little endian like every other field */
    bytestream2_put_le32(&p, codec->width);      /* biWidth */
    bytestream2_put_le32(&p, codec->height);     /* biHeight (was erroneously writing the width twice) */
    bytestream2_put_le16(&p, 1); // planes
    bytestream2_put_le16(&p, codec->bits_per_coded_sample ?: 24); /* biBitCount, default 24 bpp */

    /* compression type */
    bytestream2_put_le32(&p, codec->codec_tag);                 /* biCompression */
    bytestream2_put_le32(&p, codec->width * codec->height * 3); /* biSizeImage (upper bound) */
    bytestream2_put_le32(&p, 0); /* biXPelsPerMeter */
    bytestream2_put_le32(&p, 0); /* biYPelsPerMeter */
    bytestream2_put_le32(&p, 0); /* biClrUsed */
    bytestream2_put_le32(&p, 0); /* biClrImportant */

    /* now the remaining stuff */
    bytestream2_put_buffer(&p, codec->extradata, codec->extradata_size);

    /* word-align the chunk, as RIFF requires */
    if (codec->extradata_size & 1) {
        bytestream2_put_byte(&p, 0);
    }

bail:
    return result;
} /* create_strf_ext() */
Example #2
0
/* This function creates a magic cookie based on the codec parameter and formatID
 * Return value: a pointer to a magic cookie which has to be av_free()'d, or NULL
 * on allocation failure.
 * in cookieSize, the size of the magic cookie is returned to the caller
 * (0 if no cookie could be allocated) */
uint8_t *create_cookie(AVCodecContext *codec, size_t *cookieSize, UInt32 formatID, int vbr)
{
    uint8_t *waveAtom = NULL;
    PutByteContext p;
    int waveSize;

    if (formatID == kAudioFormatMPEG4AAC) {
        /* AAC is described by an esds atom rather than a wave atom */
        return CreateEsdsFromSetupData(codec->extradata, codec->extradata_size, cookieSize, 1, true, false);
    }

    /* Do we need an endian atom, too? */

    /* initialize the user Atom
     * 8 bytes			for the atom size & atom type
     * 18 bytes			for the already extracted part, see wav.c in the ffmpeg project
     * extradata_size	for the data still stored in the AVCodecContext structure */
    waveSize = 18 + codec->extradata_size + 8;
    waveAtom = av_malloc(waveSize);
    if (!waveAtom) {
        waveSize = 0; /* don't report a size for a cookie that doesn't exist */
        goto bail;
    }

    bytestream2_init_writer(&p, waveAtom, waveSize);

    /* now construct the wave atom */
    /* QT Atoms are big endian, I think but the WAVE data should be little endian */
    bytestream2_put_be32(&p, waveSize);         /* atom size */
    bytestream2_put_be32(&p, formatID);         /* atom type */
    /* WAVEFORMATEX layout: wFormatTag and nChannels are 16-bit fields;
     * writing them as 32-bit overflows the 18 bytes reserved above and
     * truncates the extradata */
    bytestream2_put_le16(&p, codec->codec_tag);  /* wFormatTag */
    bytestream2_put_le16(&p, codec->channels);   /* nChannels */
    bytestream2_put_le32(&p, codec->sample_rate);            /* nSamplesPerSec */
    bytestream2_put_le32(&p, vbr ? 0 : codec->bit_rate / 8); /* nAvgBytesPerSec */
    bytestream2_put_le16(&p, codec->block_align);            /* nBlockAlign */
    bytestream2_put_le16(&p, codec->bits_per_coded_sample);  /* wBitsPerSample */
    bytestream2_put_le16(&p, codec->extradata_size);         /* cbSize */

    /* now the remaining stuff */
    bytestream2_put_buffer(&p, codec->extradata, codec->extradata_size);

bail:
    *cookieSize = waveSize;
    return waveAtom;
} /* create_cookie() */
Example #3
0
/* Build the magic cookie that configures the AudioToolbox decoder.
 * For AAC this synthesizes an MPEG-4 ES descriptor (esds payload) wrapping
 * the codec extradata; that buffer is av_malloc()'d and owned by the caller.
 * For every other codec the context's own extradata is returned and must NOT
 * be freed by the caller — compare the result against at->extradata before
 * freeing. Returns NULL on allocation failure. */
static uint8_t* ffat_get_magic_cookie(AVCodecContext *avctx, UInt32 *cookie_size)
{
    ATDecodeContext *at = avctx->priv_data;
    if (avctx->codec_id == AV_CODEC_ID_AAC) {
        /* was char*: the buffer holds raw bytes, is passed to a uint8_t*
         * writer and returned as uint8_t*, so declare it as such */
        uint8_t *extradata;
        PutByteContext pb;
        /* 5 bytes per descriptor header (tag + 4-byte varint length) */
        *cookie_size = 5 + 3 + 5+13 + 5+at->extradata_size;
        if (!(extradata = av_malloc(*cookie_size)))
            return NULL;

        bytestream2_init_writer(&pb, extradata, *cookie_size);

        // ES descriptor
        put_descr(&pb, 0x03, 3 + 5+13 + 5+at->extradata_size);
        bytestream2_put_be16(&pb, 0); // ES id
        bytestream2_put_byte(&pb, 0x00); // flags (= no flags)

        // DecoderConfig descriptor
        put_descr(&pb, 0x04, 13 + 5+at->extradata_size);

        // Object type indication
        bytestream2_put_byte(&pb, 0x40);

        bytestream2_put_byte(&pb, 0x15); // flags (= Audiostream)

        bytestream2_put_be24(&pb, 0); // Buffersize DB

        bytestream2_put_be32(&pb, 0); // maxbitrate
        bytestream2_put_be32(&pb, 0); // avgbitrate

        // DecoderSpecific info descriptor
        put_descr(&pb, 0x05, at->extradata_size);
        bytestream2_put_buffer(&pb, at->extradata, at->extradata_size);
        return extradata;
    } else {
        /* borrowed pointer: caller must not free this */
        *cookie_size = at->extradata_size;
        return at->extradata;
    }
}
Example #4
0
/* Fill in the raw JPEG headers (frame, optional JFIF APP0, quantisation,
 * Huffman, scan) and the VAAPI picture parameter buffer for one MJPEG
 * picture.  Field names (P, Y, X, Nf, Lf, Tq, ...) follow ITU-T T.81.
 * Returns 0 on success. */
static int vaapi_encode_mjpeg_init_picture_params(AVCodecContext *avctx,
                                                  VAAPIEncodePicture *pic)
{
    VAAPIEncodeMJPEGContext         *priv = avctx->priv_data;
    JPEGRawFrameHeader                *fh = &priv->frame_header;
    JPEGRawScanHeader                 *sh = &priv->scan.header;
    VAEncPictureParameterBufferJPEG *vpic = pic->codec_picture_params;
    const AVPixFmtDescriptor *desc;
    const uint8_t *components;
    int t, i, quant_scale, len;

    // MJPEG is intra-only: every picture must be an IDR picture.
    av_assert0(pic->type == PICTURE_TYPE_IDR);

    desc = av_pix_fmt_desc_get(priv->common.input_frames->sw_format);
    av_assert0(desc);
    // Component identifiers: letters for RGB, 1..3 for Y/Cb/Cr.
    if (desc->flags & AV_PIX_FMT_FLAG_RGB)
        components = (uint8_t[3]) { 'R', 'G', 'B' };
    else
        components = (uint8_t[3]) {  1,   2,   3  };

    // Frame header.

    fh->P  = 8;                 // sample precision in bits
    fh->Y  = avctx->height;
    fh->X  = avctx->width;
    fh->Nf = desc->nb_components;

    for (i = 0; i < fh->Nf; i++) {
        fh->C[i] = components[i];
        // The first (luma) component carries the sampling factors derived
        // from the chroma subsampling; all other components use 1x1.
        fh->H[i] = 1 + (i == 0 ? desc->log2_chroma_w : 0);
        fh->V[i] = 1 + (i == 0 ? desc->log2_chroma_h : 0);

        fh->Tq[i] = !!i;        // quant table 0 for luma, 1 for chroma

    }

    fh->Lf = 8 + 3 * fh->Nf;    // SOF segment length (T.81 B.2.2)

    // JFIF header.
    if (priv->jfif) {
        JPEGRawApplicationData *app = &priv->jfif_header;
        AVRational sar = pic->input_image->sample_aspect_ratio;
        int sar_w, sar_h;
        PutByteContext pbc;

        bytestream2_init_writer(&pbc, priv->jfif_data,
                                sizeof(priv->jfif_data));

        bytestream2_put_buffer(&pbc, "JFIF", 5);    // identifier, incl. NUL
        bytestream2_put_be16(&pbc, 0x0102);         // JFIF version 1.02
        bytestream2_put_byte(&pbc, 0);              // density units: aspect ratio only

        // Reduce the SAR to fit the 16-bit density fields; an unset SAR
        // (num or den 0) falls through to square pixels below.
        av_reduce(&sar_w, &sar_h, sar.num, sar.den, 65535);
        if (sar_w && sar_h) {
            bytestream2_put_be16(&pbc, sar_w);
            bytestream2_put_be16(&pbc, sar_h);
        } else {
            bytestream2_put_be16(&pbc, 1);
            bytestream2_put_be16(&pbc, 1);
        }

        bytestream2_put_byte(&pbc, 0);              // thumbnail width: none
        bytestream2_put_byte(&pbc, 0);              // thumbnail height: none

        // jfif_data is sized to hold exactly this payload.
        av_assert0(bytestream2_get_bytes_left_p(&pbc) == 0);

        app->Lp     = 2 + sizeof(priv->jfif_data);
        app->Ap     = priv->jfif_data;
        app->Ap_ref = NULL;
    }

    // Quantisation tables.

    // Map quality (1..100) to a table scale factor: below 50 scales the
    // base tables up, above 50 scales them down (IJG-style curve).
    if (priv->quality < 50)
        quant_scale = 5000 / priv->quality;
    else
        quant_scale = 200 - 2 * priv->quality;

    len = 2;

    // One table for greyscale, luma + chroma tables otherwise.
    for (t = 0; t < 1 + (fh->Nf > 1); t++) {
        JPEGRawQuantisationTable *quant = &priv->quant_tables.table[t];
        const uint8_t *data = t == 0 ?
            vaapi_encode_mjpeg_quant_luminance :
            vaapi_encode_mjpeg_quant_chrominance;

        quant->Pq = 0;          // 8-bit table entries
        quant->Tq = t;
        for (i = 0; i < 64; i++)
            quant->Q[i] = av_clip(data[i] * quant_scale / 100, 1, 255);

        len += 65;              // 1 byte Pq/Tq + 64 entries
    }

    priv->quant_tables.Lq = len;

    // Huffman tables.

    len = 2;

    // DC+AC luma tables always; DC+AC chroma tables as well when Nf > 1.
    for (t = 0; t < 2 + 2 * (fh->Nf > 1); t++) {
        JPEGRawHuffmanTable *huff = &priv->huffman_tables.table[t];
        const uint8_t *lengths, *values;
        int k;

        switch (t) {
        case 0:
            // The +1 skips the leading entry of the avpriv bits tables.
            lengths = avpriv_mjpeg_bits_dc_luminance + 1;
            values  = avpriv_mjpeg_val_dc;
            break;
        case 1:
            lengths = avpriv_mjpeg_bits_ac_luminance + 1;
            values  = avpriv_mjpeg_val_ac_luminance;
            break;
        case 2:
            lengths = avpriv_mjpeg_bits_dc_chrominance + 1;
            values  = avpriv_mjpeg_val_dc;
            break;
        case 3:
            lengths = avpriv_mjpeg_bits_ac_chrominance + 1;
            values  = avpriv_mjpeg_val_ac_chrominance;
            break;
        }

        huff->Tc = t % 2;       // table class: 0 = DC, 1 = AC
        huff->Th = t / 2;       // destination: 0 = luma, 1 = chroma

        // L[i] = number of codes of length i+1; k accumulates the total.
        for (i = k = 0; i < 16; i++)
            k += (huff->L[i] = lengths[i]);

        for (i = 0; i < k; i++)
            huff->V[i] = values[i];

        len += 17 + k;          // 1 byte Tc/Th + 16 counts + k values
    }

    priv->huffman_tables.Lh = len;

    // Scan header.

    sh->Ns = fh->Nf;

    for (i = 0; i < fh->Nf; i++) {
        sh->Cs[i] = fh->C[i];
        sh->Td[i] = i > 0;      // Huffman selectors: 0 for luma, 1 for chroma
        sh->Ta[i] = i > 0;
    }

    // Baseline sequential: full spectral range, no successive approximation.
    sh->Ss = 0;
    sh->Se = 63;
    sh->Ah = 0;
    sh->Al = 0;

    sh->Ls = 6 + 2 * sh->Ns;


    *vpic = (VAEncPictureParameterBufferJPEG) {
        .reconstructed_picture = pic->recon_surface,
        .coded_buf             = pic->output_buffer,

        .picture_width  = fh->X,
        .picture_height = fh->Y,

        .pic_flags.bits = {
            .profile      = 0,
            .progressive  = 0,
            .huffman      = 1,
            .interleaved  = 0,
            .differential = 0,
        },

        .sample_bit_depth = fh->P,
        .num_scan         = 1,
        .num_components   = fh->Nf,

        // The driver modifies the provided quantisation tables according
        // to this quality value; the middle value of 50 makes that the
        // identity so that they are used unchanged.
        .quality = 50,
    };

    for (i = 0; i < fh->Nf; i++) {
        vpic->component_id[i]             = fh->C[i];
        vpic->quantiser_table_selector[i] = fh->Tq[i];
    }

    pic->nb_slices = 1;

    return 0;
}
Example #5
0
/**
 * Write a complete JPEG header (SOI up to and including SOS) into buf,
 * reconstructing the headers that RTP/JPEG (RFC 2435) strips from the
 * bitstream.
 *
 * @param buf       destination buffer
 * @param size      size of buf in bytes
 * @param type      RTP/JPEG type: nonzero selects 2x2 luma sampling (4:2:0),
 *                  zero selects 2x1 (4:2:2)
 * @param w         frame width in units of 8-pixel blocks
 * @param h         frame height in units of 8-pixel blocks
 * @param qtable    quantisation table data, 64 bytes per table
 * @param nb_qtable number of quantisation tables; with only one, chroma
 *                  reuses the luma table
 * @param dri       restart interval; a DRI segment is emitted only if nonzero
 * @return the length in bytes of the JPEG header written
 */
static int jpeg_create_header(uint8_t *buf, int size, uint32_t type, uint32_t w,
                              uint32_t h, const uint8_t *qtable, int nb_qtable,
                              int dri)
{
    PutByteContext pbc;
    uint8_t *dht_size_ptr;
    int dht_size, i;

    bytestream2_init_writer(&pbc, buf, size);

    /* Convert from blocks to pixels. */
    w <<= 3;
    h <<= 3;

    /* SOI */
    jpeg_put_marker(&pbc, SOI);

    /* JFIF header */
    jpeg_put_marker(&pbc, APP0);
    bytestream2_put_be16(&pbc, 16);          /* segment length */
    bytestream2_put_buffer(&pbc, "JFIF", 5); /* identifier, including NUL */
    bytestream2_put_be16(&pbc, 0x0102);      /* JFIF version 1.02 (T.871); was 0x0201 */
    bytestream2_put_byte(&pbc, 0);           /* density units: none, aspect ratio only */
    bytestream2_put_be16(&pbc, 1);           /* X density */
    bytestream2_put_be16(&pbc, 1);           /* Y density */
    bytestream2_put_byte(&pbc, 0);           /* no thumbnail */
    bytestream2_put_byte(&pbc, 0);

    if (dri) {
        jpeg_put_marker(&pbc, DRI);
        bytestream2_put_be16(&pbc, 4);       /* segment length */
        bytestream2_put_be16(&pbc, dri);
    }

    /* DQT */
    jpeg_put_marker(&pbc, DQT);
    bytestream2_put_be16(&pbc, 2 + nb_qtable * (1 + 64));

    for (i = 0; i < nb_qtable; i++) {
        bytestream2_put_byte(&pbc, i);       /* 8-bit precision, table id i */

        /* Each table is an array of 64 values given in zig-zag
         * order, identical to the format used in a JFIF DQT
         * marker segment. */
        bytestream2_put_buffer(&pbc, qtable + 64 * i, 64);
    }

    /* DHT: the segment length is only known after all four tables have
     * been written, so write a placeholder and patch it afterwards. */
    jpeg_put_marker(&pbc, DHT);
    dht_size_ptr = pbc.buffer;
    bytestream2_put_be16(&pbc, 0);           /* length placeholder */

    dht_size  = 2;
    dht_size += jpeg_create_huffman_table(&pbc, 0, 0, avpriv_mjpeg_bits_dc_luminance,
                                          avpriv_mjpeg_val_dc);
    dht_size += jpeg_create_huffman_table(&pbc, 0, 1, avpriv_mjpeg_bits_dc_chrominance,
                                          avpriv_mjpeg_val_dc);
    dht_size += jpeg_create_huffman_table(&pbc, 1, 0, avpriv_mjpeg_bits_ac_luminance,
                                          avpriv_mjpeg_val_ac_luminance);
    dht_size += jpeg_create_huffman_table(&pbc, 1, 1, avpriv_mjpeg_bits_ac_chrominance,
                                          avpriv_mjpeg_val_ac_chrominance);
    AV_WB16(dht_size_ptr, dht_size);

    /* SOF0 */
    jpeg_put_marker(&pbc, SOF0);
    bytestream2_put_be16(&pbc, 17); /* size */
    bytestream2_put_byte(&pbc, 8); /* bits per component */
    bytestream2_put_be16(&pbc, h);
    bytestream2_put_be16(&pbc, w);
    bytestream2_put_byte(&pbc, 3); /* number of components */
    bytestream2_put_byte(&pbc, 1); /* component number */
    bytestream2_put_byte(&pbc, (2 << 4) | (type ? 2 : 1)); /* hsample/vsample */
    bytestream2_put_byte(&pbc, 0); /* matrix number */
    bytestream2_put_byte(&pbc, 2); /* component number */
    bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
    bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */
    bytestream2_put_byte(&pbc, 3); /* component number */
    bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
    bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */

    /* SOS */
    jpeg_put_marker(&pbc, SOS);
    bytestream2_put_be16(&pbc, 12);  /* segment length */
    bytestream2_put_byte(&pbc, 3);   /* number of components */
    bytestream2_put_byte(&pbc, 1);   /* component id */
    bytestream2_put_byte(&pbc, 0);   /* DC/AC huffman table 0 */
    bytestream2_put_byte(&pbc, 2);   /* component id */
    bytestream2_put_byte(&pbc, 17);  /* DC table 1, AC table 1 */
    bytestream2_put_byte(&pbc, 3);   /* component id */
    bytestream2_put_byte(&pbc, 17);  /* DC table 1, AC table 1 */
    bytestream2_put_byte(&pbc, 0);   /* Ss: start of spectral selection */
    bytestream2_put_byte(&pbc, 63);  /* Se: end of spectral selection */
    bytestream2_put_byte(&pbc, 0);   /* Ah/Al */

    /* Return the length in bytes of the JPEG header. */
    return bytestream2_tell_p(&pbc);
}