Code example #1
File: mjpegenc_common.c  Project: DaveDaCoda/mythtv
static void jpeg_table_header(AVCodecContext *avctx, PutBitContext *p,
                              ScanTable *intra_scantable,
                              uint16_t luma_intra_matrix[64],
                              uint16_t chroma_intra_matrix[64],
                              int hsample[3])
{
    int i, j, size;
    uint8_t *ptr;
    MpegEncContext *s = avctx->priv_data;

    if (avctx->codec_id != AV_CODEC_ID_LJPEG) {
        int matrix_count = 1 + !!memcmp(luma_intra_matrix,
                                        chroma_intra_matrix,
                                        sizeof(luma_intra_matrix[0]) * 64);
        if (s->force_duplicated_matrix)
            matrix_count = 2;
        /* quant matrixes */
        put_marker(p, DQT);
        put_bits(p, 16, 2 + matrix_count * (1 + 64));
        put_bits(p, 4, 0); /* 8 bit precision */
        put_bits(p, 4, 0); /* table 0 */
        for(i=0;i<64;i++) {
            j = intra_scantable->permutated[i];
            put_bits(p, 8, luma_intra_matrix[j]);
        }

        if (matrix_count > 1) {
            put_bits(p, 4, 0); /* 8 bit precision */
            put_bits(p, 4, 1); /* table 1 */
            for(i=0;i<64;i++) {
                j = intra_scantable->permutated[i];
                put_bits(p, 8, chroma_intra_matrix[j]);
            }
        }
    }

    if(avctx->active_thread_type & FF_THREAD_SLICE){
        put_marker(p, DRI);
        put_bits(p, 16, 4);
        put_bits(p, 16, (avctx->width-1)/(8*hsample[0]) + 1);
    }

    /* huffman table */
    put_marker(p, DHT);
    flush_put_bits(p);
    ptr = put_bits_ptr(p);
    put_bits(p, 16, 0); /* patched later */
    size = 2;
    size += put_huffman_table(p, 0, 0, avpriv_mjpeg_bits_dc_luminance,
                              avpriv_mjpeg_val_dc);
    size += put_huffman_table(p, 0, 1, avpriv_mjpeg_bits_dc_chrominance,
                              avpriv_mjpeg_val_dc);

    size += put_huffman_table(p, 1, 0, avpriv_mjpeg_bits_ac_luminance,
                              avpriv_mjpeg_val_ac_luminance);
    size += put_huffman_table(p, 1, 1, avpriv_mjpeg_bits_ac_chrominance,
                              avpriv_mjpeg_val_ac_chrominance);
    AV_WB16(ptr, size);
}
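Note how the DHT segment above handles its length: the 16-bit size is not known until the Huffman tables have been written, so the writer flushes the bit buffer, remembers the byte position, emits a zero placeholder, and back-patches the final value with AV_WB16. Below is a minimal standalone sketch of that back-patching pattern, using a plain byte buffer rather than FFmpeg's PutBitContext; the payload bytes and buffer size are made up.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Write a 16-bit value in big-endian order, like AV_WB16(). */
static void wb16(uint8_t *p, uint16_t v)
{
    p[0] = v >> 8;
    p[1] = v & 0xff;
}

int main(void)
{
    uint8_t seg[64];
    size_t pos = 0;

    seg[pos++] = 0xff; seg[pos++] = 0xc4;   /* DHT marker */

    size_t len_pos = pos;                   /* remember where the length goes */
    pos += 2;                               /* zero placeholder, patched later */

    const uint8_t payload[] = { 0x00, 0x01, 0x02 }; /* stand-in table data */
    memcpy(seg + pos, payload, sizeof(payload));
    pos += sizeof(payload);

    /* A JPEG segment length counts its own two bytes but not the marker. */
    wb16(seg + len_pos, (uint16_t)(pos - len_pos));

    printf("segment is %zu bytes, length field = %d\n",
           pos, (seg[len_pos] << 8) | seg[len_pos + 1]);
    return 0;
}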
Code example #2
File: mjpegenc_common.c  Project: DaveDaCoda/mythtv
static void jpeg_put_comments(AVCodecContext *avctx, PutBitContext *p)
{
    int size;
    uint8_t *ptr;

    if (avctx->sample_aspect_ratio.num > 0 && avctx->sample_aspect_ratio.den > 0) {
        /* JFIF header */
        put_marker(p, APP0);
        put_bits(p, 16, 16);
        avpriv_put_string(p, "JFIF", 1); /* this puts the trailing zero-byte too */
        /* The most significant byte is used for major revisions, the least
         * significant byte for minor revisions. Version 1.02 is the current
         * released revision. */
        put_bits(p, 16, 0x0102);
        put_bits(p,  8, 0);              /* units type: 0 - aspect ratio */
        put_bits(p, 16, avctx->sample_aspect_ratio.num);
        put_bits(p, 16, avctx->sample_aspect_ratio.den);
        put_bits(p, 8, 0); /* thumbnail width */
        put_bits(p, 8, 0); /* thumbnail height */
    }

    /* comment */
    if (!(avctx->flags & AV_CODEC_FLAG_BITEXACT)) {
        put_marker(p, COM);
        flush_put_bits(p);
        ptr = put_bits_ptr(p);
        put_bits(p, 16, 0); /* patched later */
        avpriv_put_string(p, LIBAVCODEC_IDENT, 1);
        size = strlen(LIBAVCODEC_IDENT)+3;
        AV_WB16(ptr, size);
    }

    if (((avctx->pix_fmt == AV_PIX_FMT_YUV420P ||
          avctx->pix_fmt == AV_PIX_FMT_YUV422P ||
          avctx->pix_fmt == AV_PIX_FMT_YUV444P) && avctx->color_range != AVCOL_RANGE_JPEG)
        || avctx->color_range == AVCOL_RANGE_MPEG) {
        put_marker(p, COM);
        flush_put_bits(p);
        ptr = put_bits_ptr(p);
        put_bits(p, 16, 0); /* patched later */
        avpriv_put_string(p, "CS=ITU601", 1);
        size = strlen("CS=ITU601")+3;
        AV_WB16(ptr, size);
    }
}
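The APP0/JFIF segment emitted above always carries 16 bytes of payload (the length field counts itself but not the marker). As a rough standalone sketch, the same layout can be written into a plain byte array; the helper name and the aspect-ratio arguments below are illustrative, not part of FFmpeg's API.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Standalone sketch: lay out the APP0/JFIF segment written above into buf.
 * num/den play the role of avctx->sample_aspect_ratio; returns bytes written. */
static int write_jfif_app0(uint8_t *buf, uint16_t num, uint16_t den)
{
    uint8_t *p = buf;
    *p++ = 0xff; *p++ = 0xe0;             /* APP0 marker */
    *p++ = 0;    *p++ = 16;               /* segment length, includes these 2 bytes */
    memcpy(p, "JFIF", 5); p += 5;         /* identifier plus trailing zero byte */
    *p++ = 1;    *p++ = 2;                /* version 1.02 */
    *p++ = 0;                             /* units: 0 - aspect ratio */
    *p++ = num >> 8; *p++ = num & 0xff;   /* horizontal density (SAR numerator) */
    *p++ = den >> 8; *p++ = den & 0xff;   /* vertical density (SAR denominator) */
    *p++ = 0;                             /* thumbnail width */
    *p++ = 0;                             /* thumbnail height */
    return (int)(p - buf);                /* 18 bytes: marker + 16-byte payload */
}

int main(void)
{
    uint8_t seg[32];
    printf("APP0 segment is %d bytes\n", write_jfif_app0(seg, 4, 3)); /* hypothetical 4:3 SAR */
    return 0;
}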
Code example #3
File: mjpegenc.c  Project: 0Soul/FFmpeg
static void jpeg_put_comments(AVCodecContext *avctx, PutBitContext *p)
{
    int size;
    uint8_t *ptr;

    if (avctx->sample_aspect_ratio.num > 0 && avctx->sample_aspect_ratio.den > 0) {
        /* JFIF header */
        put_marker(p, APP0);
        put_bits(p, 16, 16);
        avpriv_put_string(p, "JFIF", 1); /* this puts the trailing zero-byte too */
        put_bits(p, 16, 0x0102);         /* v 1.02 */
        put_bits(p,  8, 0);              /* units type: 0 - aspect ratio */
        put_bits(p, 16, avctx->sample_aspect_ratio.num);
        put_bits(p, 16, avctx->sample_aspect_ratio.den);
        put_bits(p, 8, 0); /* thumbnail width */
        put_bits(p, 8, 0); /* thumbnail height */
    }

    /* comment */
    if (!(avctx->flags & CODEC_FLAG_BITEXACT)) {
        put_marker(p, COM);
        flush_put_bits(p);
        ptr = put_bits_ptr(p);
        put_bits(p, 16, 0); /* patched later */
        avpriv_put_string(p, LIBAVCODEC_IDENT, 1);
        size = strlen(LIBAVCODEC_IDENT)+3;
        AV_WB16(ptr, size);
    }

    if (avctx->pix_fmt == AV_PIX_FMT_YUV420P ||
        avctx->pix_fmt == AV_PIX_FMT_YUV422P ||
        avctx->pix_fmt == AV_PIX_FMT_YUV444P) {
        put_marker(p, COM);
        flush_put_bits(p);
        ptr = put_bits_ptr(p);
        put_bits(p, 16, 0); /* patched later */
        avpriv_put_string(p, "CS=ITU601", 1);
        size = strlen("CS=ITU601")+3;
        AV_WB16(ptr, size);
    }
}
Code example #4
File: rtpdec_jpeg.c  Project: cmtsij/FFmpeg
static int jpeg_create_header(uint8_t *buf, int size, uint32_t type, uint32_t w,
                              uint32_t h, const uint8_t *qtable, int nb_qtable)
{
    PutBitContext pbc;

    init_put_bits(&pbc, buf, size);

    /* Convert from blocks to pixels. */
    w <<= 3;
    h <<= 3;

    /* SOI */
    put_marker(&pbc, SOI);

    /* JFIF header */
    put_marker(&pbc, APP0);
    put_bits(&pbc, 16, 16);             /* segment length, including these 2 bytes */
    avpriv_put_string(&pbc, "JFIF", 1); /* identifier, with trailing zero byte */
    put_bits(&pbc, 16, 0x0102);         /* JFIF version 1.02 */
    put_bits(&pbc, 8, 0);               /* units: 0 - aspect ratio */
    put_bits(&pbc, 16, 1);              /* horizontal pixel density */
    put_bits(&pbc, 16, 1);              /* vertical pixel density */
    put_bits(&pbc, 8, 0);               /* thumbnail width */
    put_bits(&pbc, 8, 0);               /* thumbnail height */

    /* DQT */
    put_marker(&pbc, DQT);
    if (nb_qtable == 2) {
        put_bits(&pbc, 16, 2 + 2 * (1 + 64)); /* segment length: two 64-byte tables */
    } else {
        put_bits(&pbc, 16, 2 + 1 * (1 + 64)); /* segment length: one 64-byte table */
    }
    put_bits(&pbc, 8, 0); /* 8-bit precision, table id 0 */

    /* Each table is an array of 64 values given in zig-zag
     * order, identical to the format used in a JFIF DQT
     * marker segment. */
    avpriv_copy_bits(&pbc, qtable, 64 * 8);

    if (nb_qtable == 2) {
        put_bits(&pbc, 8, 1); /* 8-bit precision, table id 1 */
        avpriv_copy_bits(&pbc, qtable + 64, 64 * 8);
    }

    /* DHT */
    put_marker(&pbc, DHT);

    jpeg_create_huffman_table(&pbc, 0, 0, avpriv_mjpeg_bits_dc_luminance,
                              avpriv_mjpeg_val_dc);
    jpeg_create_huffman_table(&pbc, 0, 1, avpriv_mjpeg_bits_dc_chrominance,
                              avpriv_mjpeg_val_dc);
    jpeg_create_huffman_table(&pbc, 1, 0, avpriv_mjpeg_bits_ac_luminance,
                              avpriv_mjpeg_val_ac_luminance);
    jpeg_create_huffman_table(&pbc, 1, 1, avpriv_mjpeg_bits_ac_chrominance,
                              avpriv_mjpeg_val_ac_chrominance);

    /* SOF0 */
    put_marker(&pbc, SOF0);
    put_bits(&pbc, 16, 17);            /* segment length */
    put_bits(&pbc, 8, 8);              /* 8 bits/component */
    put_bits(&pbc, 8, h >> 8);         /* height */
    put_bits(&pbc, 8, h);
    put_bits(&pbc, 8, w >> 8);         /* width */
    put_bits(&pbc, 8, w);
    put_bits(&pbc, 8, 3);              /* 3 components */
    put_bits(&pbc, 8, 1);              /* component 1 (Y) */
    put_bits(&pbc, 8, type ? 34 : 33); /* sampling factors: 0x22 (4:2:0) or 0x21 (4:2:2) */
    put_bits(&pbc, 8, 0);              /* quantization table 0 */
    put_bits(&pbc, 8, 2);              /* component 2 (Cb) */
    put_bits(&pbc, 8, 17);             /* sampling factors 0x11 */
    put_bits(&pbc, 8, nb_qtable == 2 ? 1 : 0); /* quantization table */
    put_bits(&pbc, 8, 3);              /* component 3 (Cr) */
    put_bits(&pbc, 8, 17);             /* sampling factors 0x11 */
    put_bits(&pbc, 8, nb_qtable == 2 ? 1 : 0); /* quantization table */

    /* SOS */
    put_marker(&pbc, SOS);
    put_bits(&pbc, 16, 12); /* segment length */
    put_bits(&pbc, 8, 3);   /* 3 components */
    put_bits(&pbc, 8, 1);   /* component 1 (Y) */
    put_bits(&pbc, 8, 0);   /* DC/AC Huffman tables 0 */
    put_bits(&pbc, 8, 2);   /* component 2 (Cb) */
    put_bits(&pbc, 8, 17);  /* DC/AC Huffman tables 1 */
    put_bits(&pbc, 8, 3);   /* component 3 (Cr) */
    put_bits(&pbc, 8, 17);  /* DC/AC Huffman tables 1 */
    put_bits(&pbc, 8, 0);   /* Ss: start of spectral selection */
    put_bits(&pbc, 8, 63);  /* Se: end of spectral selection */
    put_bits(&pbc, 8, 0);   /* Ah/Al */

    /* Fill the buffer. */
    flush_put_bits(&pbc);

    /* Return the length in bytes of the JPEG header. */
    return put_bits_count(&pbc) / 8;
}
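The SOF0 values 33 and 34 above are the luma sampling-factor byte: the high nibble is the horizontal factor and the low nibble the vertical one, so RTP/JPEG type 0 maps to 0x21 (4:2:2) and type 1 to 0x22 (4:2:0). A small standalone sketch (with a hypothetical helper name) that packs and prints those bytes:

#include <stdint.h>
#include <stdio.h>

/* Pack horizontal/vertical sampling factors into one SOF0 byte (high nibble = H). */
static uint8_t sampling_byte(int h, int v)
{
    return (uint8_t)((h << 4) | v);
}

int main(void)
{
    /* type 0 -> 2x1 luma sampling (4:2:2), type 1 -> 2x2 luma sampling (4:2:0) */
    printf("type 0: 0x%02X (%d)\n", sampling_byte(2, 1), sampling_byte(2, 1)); /* 0x21 = 33 */
    printf("type 1: 0x%02X (%d)\n", sampling_byte(2, 2), sampling_byte(2, 2)); /* 0x22 = 34 */
    return 0;
}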
Code example #5
File: rtpdec_jpeg.c  Project: cmtsij/FFmpeg
static int jpeg_parse_packet(AVFormatContext *ctx, PayloadContext *jpeg,
                             AVStream *st, AVPacket *pkt, uint32_t *timestamp,
                             const uint8_t *buf, int len, int flags)
{
    uint8_t type, q, width, height;
    const uint8_t *qtables = NULL;
    uint16_t qtable_len;
    uint32_t off;
    int ret;

    if (len < 8) {
        av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
        return AVERROR_INVALIDDATA;
    }

    /* Parse the main JPEG header. */
    off    = AV_RB24(buf + 1);  /* fragment byte offset */
    type   = AV_RB8(buf + 4);   /* id of jpeg decoder params */
    q      = AV_RB8(buf + 5);   /* quantization factor (or table id) */
    width  = AV_RB8(buf + 6);   /* frame width in 8 pixel blocks */
    height = AV_RB8(buf + 7);   /* frame height in 8 pixel blocks */
    buf += 8;
    len -= 8;

    /* Parse the restart marker header. */
    if (type > 63) {
        av_log(ctx, AV_LOG_ERROR,
               "Unimplemented RTP/JPEG restart marker header.\n");
        return AVERROR_PATCHWELCOME;
    }

    /* Parse the quantization table header. */
    if (q > 127 && off == 0) {
        uint8_t precision;

        if (len < 4) {
            av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
            return AVERROR_INVALIDDATA;
        }

        /* The first byte is reserved for future use. */
        precision  = AV_RB8(buf + 1);    /* size of coefficients */
        qtable_len = AV_RB16(buf + 2);   /* length in bytes */
        buf += 4;
        len -= 4;

        if (precision)
            av_log(ctx, AV_LOG_WARNING, "Only 8-bit precision is supported.\n");

        if (q == 255 && qtable_len == 0) {
            av_log(ctx, AV_LOG_ERROR,
                   "Invalid RTP/JPEG packet. Quantization tables not found.\n");
            return AVERROR_INVALIDDATA;
        }

        if (qtable_len > 0) {
            if (len < qtable_len) {
                av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
                return AVERROR_INVALIDDATA;
            }
            qtables = buf;
            buf += qtable_len;
            len -= qtable_len;
        }
    }

    if (off == 0) {
        /* Start of JPEG data packet. */
        uint8_t new_qtables[128];
        uint8_t hdr[1024];

        /* Skip the current frame in case the end packet
         * was lost somewhere. */
        free_frame_if_needed(jpeg);

        if ((ret = avio_open_dyn_buf(&jpeg->frame)) < 0)
            return ret;
        jpeg->timestamp = *timestamp;

        if (!qtables) {
            create_default_qtables(new_qtables, q);
            qtables    = new_qtables;
            qtable_len = sizeof(new_qtables);
        }

        /* Generate a frame and scan headers that can be prepended to the
         * RTP/JPEG data payload to produce a JPEG compressed image in
         * interchange format. */
        jpeg->hdr_size = jpeg_create_header(hdr, sizeof(hdr), type, width,
                                            height, qtables,
                                            qtable_len > 64 ? 2 : 1);

        /* Copy JPEG header to frame buffer. */
        avio_write(jpeg->frame, hdr, jpeg->hdr_size);
    }

    if (!jpeg->frame) {
        av_log(ctx, AV_LOG_ERROR,
               "Received packet without a start chunk; dropping frame.\n");
        return AVERROR(EAGAIN);
    }

    if (jpeg->timestamp != *timestamp) {
        /* Skip the current frame if the timestamp is incorrect;
         * a start packet has been lost somewhere. */
        free_frame_if_needed(jpeg);
        av_log(ctx, AV_LOG_ERROR, "RTP timestamps don't match.\n");
        return AVERROR_INVALIDDATA;
    }

    if (off != avio_tell(jpeg->frame) - jpeg->hdr_size) {
        av_log(ctx, AV_LOG_ERROR,
               "Missing packets; dropping frame.\n");
        return AVERROR(EAGAIN);
    }

    /* Copy data to frame buffer. */
    avio_write(jpeg->frame, buf, len);

    if (flags & RTP_FLAG_MARKER) {
        /* End of JPEG data packet. */
        PutBitContext pbc;
        uint8_t buf[2];

        /* Put EOI marker. */
        init_put_bits(&pbc, buf, sizeof(buf));
        put_marker(&pbc, EOI);
        flush_put_bits(&pbc);
        avio_write(jpeg->frame, buf, sizeof(buf));

        /* Prepare the JPEG packet. */
        av_init_packet(pkt);
        pkt->size = avio_close_dyn_buf(jpeg->frame, &pkt->data);
        if (pkt->size < 0) {
            av_log(ctx, AV_LOG_ERROR,
                   "Error occured when getting frame buffer.\n");
            jpeg->frame = NULL;
            return pkt->size;
        }
        pkt->stream_index = st->index;
        pkt->destruct     = av_destruct_packet;

        /* Re-init the frame buffer. */
        jpeg->frame = NULL;

        return 0;
    }

    return AVERROR(EAGAIN);
}
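For reference, the fixed 8-byte main header that jpeg_parse_packet reads at the top is defined by RFC 2435, section 3.1. Below is a standalone sketch of the same layout with a made-up sample packet; the struct and function names are illustrative, not FFmpeg API.

#include <stdint.h>
#include <stdio.h>

/* Fields of the RTP/JPEG main header (RFC 2435, section 3.1). */
struct rtp_jpeg_hdr {
    uint32_t off;     /* fragment byte offset into the JPEG frame */
    uint8_t  type;    /* id of the JPEG decoder parameters */
    uint8_t  q;       /* quantization factor, or table id when >= 128 */
    uint8_t  width;   /* frame width in 8-pixel blocks */
    uint8_t  height;  /* frame height in 8-pixel blocks */
};

/* Parse the fixed 8-byte header; returns 0 on success, -1 if too short. */
static int parse_main_header(const uint8_t *buf, int len, struct rtp_jpeg_hdr *h)
{
    if (len < 8)
        return -1;
    h->off    = (buf[1] << 16) | (buf[2] << 8) | buf[3]; /* 24-bit big-endian */
    h->type   = buf[4];
    h->q      = buf[5];
    h->width  = buf[6];
    h->height = buf[7];
    return 0;
}

int main(void)
{
    /* Hypothetical first fragment of an 80x48 (10x6 block) 4:2:0 frame, Q=200. */
    const uint8_t pkt[8] = { 0, 0, 0, 0, 1, 200, 10, 6 };
    struct rtp_jpeg_hdr h;
    if (parse_main_header(pkt, sizeof(pkt), &h) == 0)
        printf("off=%u type=%d q=%d size=%dx%d px\n",
               h.off, h.type, h.q, h.width * 8, h.height * 8);
    return 0;
}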
Code example #6
File: mjpegenc.c  Project: 0Soul/FFmpeg
void ff_mjpeg_encode_picture_header(AVCodecContext *avctx, PutBitContext *pb,
                                    ScanTable *intra_scantable,
                                    uint16_t luma_intra_matrix[64],
                                    uint16_t chroma_intra_matrix[64])
{
    const int lossless = avctx->codec_id != AV_CODEC_ID_MJPEG && avctx->codec_id != AV_CODEC_ID_AMV;
    int hsample[3], vsample[3];
    int i;
    int chroma_matrix = !!memcmp(luma_intra_matrix,
                                 chroma_intra_matrix,
                                 sizeof(luma_intra_matrix[0])*64);

    ff_mjpeg_init_hvsample(avctx, hsample, vsample);

    put_marker(pb, SOI);

    // hack for AMV mjpeg format
    if(avctx->codec_id == AV_CODEC_ID_AMV) goto end;

    jpeg_put_comments(avctx, pb);

    jpeg_table_header(avctx, pb, intra_scantable, luma_intra_matrix, chroma_intra_matrix, hsample);

    switch (avctx->codec_id) {
    case AV_CODEC_ID_MJPEG:  put_marker(pb, SOF0 ); break;
    case AV_CODEC_ID_LJPEG:  put_marker(pb, SOF3 ); break;
    default: av_assert0(0);
    }

    put_bits(pb, 16, 17);
    if (lossless && (  avctx->pix_fmt == AV_PIX_FMT_BGR0
                    || avctx->pix_fmt == AV_PIX_FMT_BGRA
                    || avctx->pix_fmt == AV_PIX_FMT_BGR24))
        put_bits(pb, 8, 9); /* 9 bits/component RCT */
    else
        put_bits(pb, 8, 8); /* 8 bits/component */
    put_bits(pb, 16, avctx->height);
    put_bits(pb, 16, avctx->width);
    put_bits(pb, 8, 3); /* 3 components */

    /* Y component */
    put_bits(pb, 8, 1); /* component number */
    put_bits(pb, 4, hsample[0]); /* H factor */
    put_bits(pb, 4, vsample[0]); /* V factor */
    put_bits(pb, 8, 0); /* select matrix */

    /* Cb component */
    put_bits(pb, 8, 2); /* component number */
    put_bits(pb, 4, hsample[1]); /* H factor */
    put_bits(pb, 4, vsample[1]); /* V factor */
    put_bits(pb, 8, lossless ? 0 : chroma_matrix); /* select matrix */

    /* Cr component */
    put_bits(pb, 8, 3); /* component number */
    put_bits(pb, 4, hsample[2]); /* H factor */
    put_bits(pb, 4, vsample[2]); /* V factor */
    put_bits(pb, 8, lossless ? 0 : chroma_matrix); /* select matrix */

    /* scan header */
    put_marker(pb, SOS);
    put_bits(pb, 16, 12); /* length */
    put_bits(pb, 8, 3); /* 3 components */

    /* Y component */
    put_bits(pb, 8, 1); /* index */
    put_bits(pb, 4, 0); /* DC huffman table index */
    put_bits(pb, 4, 0); /* AC huffman table index */

    /* Cb component */
    put_bits(pb, 8, 2); /* index */
    put_bits(pb, 4, 1); /* DC huffman table index */
    put_bits(pb, 4, lossless ? 0 : 1); /* AC huffman table index */

    /* Cr component */
    put_bits(pb, 8, 3); /* index */
    put_bits(pb, 4, 1); /* DC huffman table index */
    put_bits(pb, 4, lossless ? 0 : 1); /* AC huffman table index */

    put_bits(pb, 8, lossless ? avctx->prediction_method + 1 : 0); /* Ss: predictor for lossless, otherwise unused */

    switch (avctx->codec_id) {
    case AV_CODEC_ID_MJPEG:  put_bits(pb, 8, 63); break; /* Se (not used) */
    case AV_CODEC_ID_LJPEG:  put_bits(pb, 8,  0); break; /* not used */
    default: av_assert0(0);
    }

    put_bits(pb, 8, 0); /* Ah/Al (not used) */

end:
    if (!lossless) {
        MpegEncContext *s = avctx->priv_data;
        av_assert0(avctx->codec->priv_data_size == sizeof(MpegEncContext));

        s->esc_pos = put_bits_count(pb) >> 3;
        for(i=1; i<s->slice_context_count; i++)
            s->thread_context[i]->esc_pos = 0;
    }