Example 1
void ff_dirac_init_arith_decoder(DiracArith *c, GetBitContext *gb, int length)
{
    int i;
    align_get_bits(gb);

    length = FFMIN(length, get_bits_left(gb)/8);

    c->bytestream     = gb->buffer + get_bits_count(gb)/8;
    c->bytestream_end = c->bytestream + length;
    skip_bits_long(gb, length*8);

    c->low = 0;
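    /* fill the low register with the next four input bytes, padding with 0xff
     * once the end of the buffer is reached */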
    for (i = 0; i < 4; i++) {
        c->low <<= 8;
        if (c->bytestream < c->bytestream_end)
            c->low |= *c->bytestream++;
        else
            c->low |= 0xff;
    }

    c->counter = -16;
    c->range   = 0xffff;

    for (i = 0; i < 256; i++) {
        ff_dirac_prob_branchless[i][0] =  ff_dirac_prob[255-i];
        ff_dirac_prob_branchless[i][1] = -ff_dirac_prob[i];
    }

    for (i = 0; i < DIRAC_CTX_COUNT; i++)
        c->contexts[i] = 0x8000;
}
Example 2
void lag_rac_init(lag_rac *l, GetBitContext *gb, int length)
{
    int i, j;

    /* According to reference decoder "1st byte is garbage",
     * however, it gets skipped by the call to align_get_bits()
     */
    align_get_bits(gb);
    l->bytestream_start =
        l->bytestream       = gb->buffer + get_bits_count(gb) / 8;
    l->bytestream_end   = l->bytestream_start + length;

    l->range        = 0x80;
    l->low          = *l->bytestream >> 1;
    l->hash_shift   = FFMAX(l->scale - 8, 0);

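    /* build range_hash: for each possible value of the top bits, store the
     * index of the cumulative-probability interval in l->prob that contains it */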
    for (i = j = 0; i < 256; i++) {
        unsigned r = i << l->hash_shift;
        while (l->prob[j + 1] <= r)
            j++;
        l->range_hash[i] = j;
    }

    /* Add conversion factor to hash_shift so we don't have to in lag_get_rac. */
    l->hash_shift += 23;
}
Example 3
void ff_hevc_cabac_init(HEVCContext *s)
{
    int i;
    int init_type;
    GetBitContext *gb = &s->gb;

    align_get_bits(gb);
    ff_init_cabac_states(&s->cc);
    ff_init_cabac_decoder(&s->cc,
                          gb->buffer + get_bits_count(gb) / 8,
                          (get_bits_left(&s->gb) + 7) / 8);

    init_type = 2 - s->sh.slice_type;
    if (s->sh.cabac_init_flag && s->sh.slice_type != I_SLICE)
        init_type ^= 3;

    elem_offset[0] = 0;
    for (i = 1; i < sizeof(num_bins_in_se); i++) {
        elem_offset[i] = elem_offset[i-1] + num_bins_in_se[i-1];
    }

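    /* derive each context's initial probability state from its 8-bit init value:
     * the high nibble gives the slope m, the low nibble the offset n; the result
     * is clipped to [1, 126] and split into an MPS flag and a state index */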
    for (i = 0; i < HEVC_CONTEXTS; i++) {
        int init_value = init_values[init_type][i];
        int m = (init_value >> 4)*5 - 45;
        int n = ((init_value & 15) << 3) - 16;
        int pre_ctx_state = av_clip_c(((m * av_clip_c(s->sh.slice_qp, 0, 51)) >> 4) + n,
                                      1, 126);
        int mps = (pre_ctx_state <= 63) ? 0 : 1;
        int state_idx = mps ? (pre_ctx_state - 64) : (63 - pre_ctx_state);
        s->cabac_state[i] = (state_idx << 1) + mps;
    }
}
Example 4
int ff_copy_pce_data(PutBitContext *pb, GetBitContext *gb)
{
    int five_bit_ch, four_bit_ch, comment_size, bits;
    int offset = put_bits_count(pb);

    copy_bits(pb, gb, 10);                  //Tag, Object Type, Frequency
    five_bit_ch  = copy_bits(pb, gb, 4);    //Front
    five_bit_ch += copy_bits(pb, gb, 4);    //Side
    five_bit_ch += copy_bits(pb, gb, 4);    //Back
    four_bit_ch  = copy_bits(pb, gb, 2);    //LFE
    four_bit_ch += copy_bits(pb, gb, 3);    //Data
    five_bit_ch += copy_bits(pb, gb, 4);    //Coupling
    if (copy_bits(pb, gb, 1))               //Mono Mixdown
        copy_bits(pb, gb, 4);
    if (copy_bits(pb, gb, 1))               //Stereo Mixdown
        copy_bits(pb, gb, 4);
    if (copy_bits(pb, gb, 1))               //Matrix Mixdown
        copy_bits(pb, gb, 3);
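    // each front/side/back/coupling element tag is 5 bits (is_cpe flag + 4-bit index),
    // each LFE/data element tag is 4 bits; copy them in 16-bit chunks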
    for (bits = five_bit_ch*5+four_bit_ch*4; bits > 16; bits -= 16)
        copy_bits(pb, gb, 16);
    if (bits)
        copy_bits(pb, gb, bits);
    align_put_bits(pb);
    align_get_bits(gb);
    comment_size = copy_bits(pb, gb, 8);
    for (; comment_size > 0; comment_size--)
        copy_bits(pb, gb, 8);

    return put_bits_count(pb) - offset;
}
Example 5
int ff_ccitt_unpack(AVCodecContext *avctx,
                    const uint8_t *src, int srcsize,
                    uint8_t *dst, int height, int stride,
                    enum TiffCompr compr, int opts)
{
    int j;
    GetBitContext gb;
    int *runs, *ref = NULL, *runend;
    int ret;
    int runsize= avctx->width + 2;
    int err = 0;
    int has_eol;

    runs = av_malloc(runsize * sizeof(runs[0]));
    ref  = av_malloc(runsize * sizeof(ref[0]));
    if (!runs || !ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ref[0] = avctx->width;
    ref[1] = 0;
    ref[2] = 0;
    init_get_bits(&gb, src, srcsize*8);
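    // a leading EOL code (eleven or more zero bits followed by a 1) means
    // the stream carries sync markers between lines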
    has_eol = show_bits(&gb, 12) == 1 || show_bits(&gb, 16) == 1;

    for(j = 0; j < height; j++){
        runend = runs + runsize;
        if(compr == TIFF_G4){
            ret = decode_group3_2d_line(avctx, &gb, avctx->width, runs, runend, ref);
            if(ret < 0){
                err = -1;
                goto fail;
            }
        }else{
            int g3d1 = (compr == TIFF_G3) && !(opts & 1);
            if(compr!=TIFF_CCITT_RLE && has_eol && find_group3_syncmarker(&gb, srcsize*8) < 0)
                break;
            if(compr==TIFF_CCITT_RLE || g3d1 || get_bits1(&gb))
                ret = decode_group3_1d_line(avctx, &gb, avctx->width, runs, runend);
            else
                ret = decode_group3_2d_line(avctx, &gb, avctx->width, runs, runend, ref);
            if(compr==TIFF_CCITT_RLE)
                align_get_bits(&gb);
        }
        if(ret < 0){
            put_line(dst, stride, avctx->width, ref);
        }else{
            put_line(dst, stride, avctx->width, runs);
            FFSWAP(int*, runs, ref);
        }
        dst += stride;
    }
fail:
    av_free(runs);
    av_free(ref);
    return err;
}
Example 6
static void cabac_init_decoder(HEVCContext *s)
{
    GetBitContext *gb = &s->HEVClc.gb;
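    /* the slice header ends with an alignment bit equal to one followed by
     * zero bits up to a byte boundary; skip the bit, then byte-align */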
    skip_bits(gb, 1);
    align_get_bits(gb);
    ff_init_cabac_decoder(&s->HEVClc.cc,
                          gb->buffer + get_bits_count(gb) / 8,
                          (get_bits_left(gb) + 7) / 8);
}
Example 7
int ff_h264_decode_sei(H264Context *h){
    while (get_bits_left(&h->gb) > 16) {
        int size, type;

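        // SEI type and payload size are each coded as a run of 0xff bytes
        // followed by a final byte; the value is their sum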
        type=0;
        do{
            if (get_bits_left(&h->gb) < 8)
                return -1;
            type+= show_bits(&h->gb, 8);
        }while(get_bits(&h->gb, 8) == 255);

        size=0;
        do{
            if (get_bits_left(&h->gb) < 8)
                return -1;
            size+= show_bits(&h->gb, 8);
        }while(get_bits(&h->gb, 8) == 255);

        if(h->avctx->debug&FF_DEBUG_STARTCODE)
            av_log(h->avctx, AV_LOG_DEBUG, "SEI %d len:%d\n", type, size);

        switch(type){
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            if(decode_picture_timing(h) < 0)
                return -1;
            break;
        case SEI_TYPE_USER_DATA_ITU_T_T35:
            if(decode_user_data_itu_t_t35(h, size) < 0)
                return -1;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            if(decode_unregistered_user_data(h, size) < 0)
                return -1;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            if(decode_recovery_point(h) < 0)
                return -1;
            break;
        case SEI_BUFFERING_PERIOD:
            if(decode_buffering_period(h) < 0)
                return -1;
            break;
        case SEI_TYPE_FRAME_PACKING:
            if(decode_frame_packing(h, size) < 0)
                return -1;
            break;
        default:
            skip_bits(&h->gb, 8*size);
        }

        //FIXME check bits here
        align_get_bits(&h->gb);
    }

    return 0;
}
Example 8
/**
 * Decode the group of blocks / video packet header.
 * @return bit position of the resync_marker, or <0 if none was found
 */
int ff_h263_resync(MpegEncContext *s){
    int left, pos, ret;

    if(s->codec_id==AV_CODEC_ID_MPEG4){
        skip_bits1(&s->gb);
        align_get_bits(&s->gb);
    }

    if(show_bits(&s->gb, 16)==0){
        pos= get_bits_count(&s->gb);
        if(CONFIG_MPEG4_DECODER && s->codec_id==AV_CODEC_ID_MPEG4)
            ret= ff_mpeg4_decode_video_packet_header(s->avctx->priv_data);
        else
            ret= h263_decode_gob_header(s);
        if(ret>=0)
            return pos;
    }
    //OK, it's not where it is supposed to be ...
    s->gb= s->last_resync_gb;
    align_get_bits(&s->gb);
    left= get_bits_left(&s->gb);

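    //scan byte-aligned positions for 16 zero bits; stop once fewer bits remain
    //than the shortest resync marker plus video packet header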
    for(;left>16+1+5+5; left-=8){
        if(show_bits(&s->gb, 16)==0){
            GetBitContext bak= s->gb;

            pos= get_bits_count(&s->gb);
            if(CONFIG_MPEG4_DECODER && s->codec_id==AV_CODEC_ID_MPEG4)
                ret= ff_mpeg4_decode_video_packet_header(s->avctx->priv_data);
            else
                ret= h263_decode_gob_header(s);
            if(ret>=0)
                return pos;

            s->gb= bak;
        }
        skip_bits(&s->gb, 8);
    }

    return -1;
}
Example 9
int ff_h264_decode_sei(H264Context *h)
{
    while (get_bits_left(&h->gb) > 16) {
        int size = 0;
        int type = 0;
        int ret  = 0;

        do
            type += show_bits(&h->gb, 8);
        while (get_bits(&h->gb, 8) == 255);

        do
            size += show_bits(&h->gb, 8);
        while (get_bits(&h->gb, 8) == 255);

        if (size > get_bits_left(&h->gb) / 8) {
            av_log(h->avctx, AV_LOG_ERROR, "SEI type %d truncated at %d\n",
                   type, get_bits_left(&h->gb));
            return AVERROR_INVALIDDATA;
        }

        switch (type) {
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            ret = decode_picture_timing(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            ret = decode_unregistered_user_data(h, size);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            ret = decode_recovery_point(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_BUFFERING_PERIOD:
            ret = decode_buffering_period(h);
            if (ret < 0)
                return ret;
            break;
        default:
            av_log(h->avctx, AV_LOG_DEBUG, "unknown SEI type %d\n", type);
            skip_bits(&h->gb, 8 * size);
        }

        // FIXME check bits here
        align_get_bits(&h->gb);
    }

    return 0;
}
Example 10
int ff_h264_decode_sei(H264Context *h)
{
    while (get_bits_left(&h->gb) > 16) {
        int size = 0;
        int type = 0;
        int ret  = 0;

        do
            type += show_bits(&h->gb, 8);
        while (get_bits(&h->gb, 8) == 255);

        do
            size += show_bits(&h->gb, 8);
        while (get_bits(&h->gb, 8) == 255);

        switch (type) {
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            ret = decode_picture_timing(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            ret = decode_unregistered_user_data(h, size);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            ret = decode_recovery_point(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_BUFFERING_PERIOD:
            ret = decode_buffering_period(h);
            if (ret < 0)
                return ret;
            break;
        default:
            skip_bits(&h->gb, 8 * size);
        }

        // FIXME check bits here
        align_get_bits(&h->gb);
    }

    return 0;
}
Example 11
int ff_h264_decode_sei(H264Context *h){
    MpegEncContext * const s = &h->s;

    while(get_bits_count(&s->gb) + 16 < s->gb.size_in_bits){
        int size, type;

        type=0;
        do{
            type+= show_bits(&s->gb, 8);
        }while(get_bits(&s->gb, 8) == 255);

        size=0;
        do{
            size+= show_bits(&s->gb, 8);
        }while(get_bits(&s->gb, 8) == 255);

        switch(type){
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            if(decode_picture_timing(h) < 0)
                return -1;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            if(decode_unregistered_user_data(h, size) < 0)
                return -1;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            if(decode_recovery_point(h) < 0)
                return -1;
            break;
        case SEI_BUFFERING_PERIOD:
            if(decode_buffering_period(h) < 0)
                return -1;
            break;
        default:
            skip_bits(&s->gb, 8*size);
        }

        //FIXME check bits here
        align_get_bits(&s->gb);
    }

    return 0;
}
Example 12
/**
 * Decode the group of blocks / video packet header.
 * @return <0 if no resync found
 */
static int ff_h261_resync(H261Context *h)
{
    MpegEncContext * const s = &h->s;
    int left, ret;

    if (h->gob_start_code_skipped) {
        ret = h261_decode_gob_header(h);
        if (ret >= 0)
            return 0;
    } else {
        if (show_bits(&s->gb, 15) == 0) {
            ret = h261_decode_gob_header(h);
            if (ret >= 0)
                return 0;
        }
        //OK, it is not where it is supposed to be ...
        s->gb = s->last_resync_gb;
        align_get_bits(&s->gb);
        left = get_bits_left(&s->gb);

        for (; left > 15 + 1 + 4 + 5; left -= 8) {
            if (show_bits(&s->gb, 15) == 0) {
                GetBitContext bak = s->gb;

                ret = h261_decode_gob_header(h);
                if (ret >= 0)
                    return 0;

                s->gb = bak;
            }
            skip_bits(&s->gb, 8);
        }
    }

    return -1;
}
Example 13
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size,
                        AVPacket *avpkt) {
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    AVSubtitle *sub = data;
    const uint8_t *buf_end = buf + buf_size;
    uint8_t *bitmap;
    int w, h, x, y, i;
    int64_t packet_time = 0;
    GetBitContext gb;
    int has_alpha = avctx->codec_tag == MKTAG('D','X','S','A');

    // check that at least header fits
    if (buf_size < 27 + 7 * 2 + 4 * 3) {
        av_log(avctx, AV_LOG_ERROR, "coded frame too small\n");
        return -1;
    }

    // read start and end time
    if (buf[0] != '[' || buf[13] != '-' || buf[26] != ']') {
        av_log(avctx, AV_LOG_ERROR, "invalid time code\n");
        return -1;
    }
    if (avpkt->pts != AV_NOPTS_VALUE)
        packet_time = av_rescale_q(avpkt->pts, AV_TIME_BASE_Q, (AVRational){1, 1000});
    sub->start_display_time = parse_timecode(buf +  1, packet_time);
    sub->end_display_time   = parse_timecode(buf + 14, packet_time);
    buf += 27;

    // read header
    w = bytestream_get_le16(&buf);
    h = bytestream_get_le16(&buf);
    if (av_image_check_size(w, h, 0, avctx) < 0)
        return -1;
    x = bytestream_get_le16(&buf);
    y = bytestream_get_le16(&buf);
    // skip bottom right position, it gives no new information
    bytestream_get_le16(&buf);
    bytestream_get_le16(&buf);
    // The following value is supposed to indicate the start offset
    // (relative to the palette) of the data for the second field,
    // however there are files  where it has a bogus value and thus
    // we just ignore it
    bytestream_get_le16(&buf);

    // allocate sub and set values
    sub->rects =  av_mallocz(sizeof(*sub->rects));
    sub->rects[0] = av_mallocz(sizeof(*sub->rects[0]));
    sub->num_rects = 1;
    sub->rects[0]->x = x; sub->rects[0]->y = y;
    sub->rects[0]->w = w; sub->rects[0]->h = h;
    sub->rects[0]->type = SUBTITLE_BITMAP;
    sub->rects[0]->pict.linesize[0] = w;
    sub->rects[0]->pict.data[0] = av_malloc(w * h);
    sub->rects[0]->nb_colors = 4;
    sub->rects[0]->pict.data[1] = av_mallocz(AVPALETTE_SIZE);

    // read palette
    for (i = 0; i < sub->rects[0]->nb_colors; i++)
        ((uint32_t*)sub->rects[0]->pict.data[1])[i] = bytestream_get_be24(&buf);
    // make all except background (first entry) non-transparent
    for (i = 0; i < sub->rects[0]->nb_colors; i++)
        ((uint32_t*)sub->rects[0]->pict.data[1])[i] |= (has_alpha ? *buf++ : (i ? 0xff : 0)) << 24;

    // process RLE-compressed data
    init_get_bits(&gb, buf, (buf_end - buf) * 8);
    bitmap = sub->rects[0]->pict.data[0];
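    // the run length is coded with a variable width (2, 6, 10 or 14 bits,
    // chosen by the number of leading zero bits of the next byte) and is
    // followed by a 2-bit color index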
    for (y = 0; y < h; y++) {
        // interlaced: do odd lines
        if (y == (h + 1) / 2) bitmap = sub->rects[0]->pict.data[0] + w;
        for (x = 0; x < w; ) {
            int log2 = ff_log2_tab[show_bits(&gb, 8)];
            int run = get_bits(&gb, 14 - 4 * (log2 >> 1));
            int color = get_bits(&gb, 2);
            run = FFMIN(run, w - x);
            // run length 0 means till end of row
            if (!run) run = w - x;
            memset(bitmap, color, run);
            bitmap += run;
            x += run;
        }
        // interlaced, skip every second line
        bitmap += w;
        align_get_bits(&gb);
    }
    *data_size = 1;
    return buf_size;
}
Example 14
int ff_h264_sei_decode(H264SEIContext *h, GetBitContext *gb,
                       const H264ParamSets *ps, void *logctx)
{
    int master_ret = 0;

    while (get_bits_left(gb) > 16 && show_bits(gb, 16)) {
        int type = 0;
        unsigned size = 0;
        unsigned next;
        int ret  = 0;

        do {
            if (get_bits_left(gb) < 8)
                return AVERROR_INVALIDDATA;
            type += show_bits(gb, 8);
        } while (get_bits(gb, 8) == 255);

        do {
            if (get_bits_left(gb) < 8)
                return AVERROR_INVALIDDATA;
            size += show_bits(gb, 8);
        } while (get_bits(gb, 8) == 255);

        if (size > get_bits_left(gb) / 8) {
            av_log(logctx, AV_LOG_ERROR, "SEI type %d size %d truncated at %d\n",
                   type, 8*size, get_bits_left(gb));
            return AVERROR_INVALIDDATA;
        }
        next = get_bits_count(gb) + 8 * size;

        switch (type) {
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            ret = decode_picture_timing(&h->picture_timing, gb, ps, logctx);
            break;
        case SEI_TYPE_USER_DATA_REGISTERED:
            ret = decode_registered_user_data(h, gb, logctx, size);
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            ret = decode_unregistered_user_data(&h->unregistered, gb, logctx, size);
            break;
        case SEI_TYPE_RECOVERY_POINT:
            ret = decode_recovery_point(&h->recovery_point, gb);
            break;
        case SEI_TYPE_BUFFERING_PERIOD:
            ret = decode_buffering_period(&h->buffering_period, gb, ps, logctx);
            break;
        case SEI_TYPE_FRAME_PACKING:
            ret = decode_frame_packing_arrangement(&h->frame_packing, gb);
            break;
        case SEI_TYPE_DISPLAY_ORIENTATION:
            ret = decode_display_orientation(&h->display_orientation, gb);
            break;
        case SEI_TYPE_GREEN_METADATA:
            ret = decode_green_metadata(&h->green_metadata, gb);
            break;
        default:
            av_log(logctx, AV_LOG_DEBUG, "unknown SEI type %d\n", type);
        }
        if (ret < 0 && ret != AVERROR_PS_NOT_FOUND)
            return ret;
        if (ret < 0)
            master_ret = ret;

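        // skip to the start of the next SEI message, whether or not the
        // handler consumed its whole payload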
        skip_bits_long(gb, next - get_bits_count(gb));

        // FIXME check bits here
        align_get_bits(gb);
    }

    return master_ret;
}
Example 15
static int decode_frame(FLACContext *s)
{
    int i, ret;
    GetBitContext *gb = &s->gb;
    FLACFrameInfo fi;

    if ((ret = ff_flac_decode_frame_header(s->avctx, gb, &fi, 0)) < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "invalid frame header\n");
        return ret;
    }

    if (s->channels && fi.channels != s->channels && s->got_streaminfo) {
        s->channels = s->avctx->channels = fi.channels;
        ff_flac_set_channel_layout(s->avctx);
        ret = allocate_buffers(s);
        if (ret < 0)
            return ret;
    }
    s->channels = s->avctx->channels = fi.channels;
    if (!s->avctx->channel_layout)
        ff_flac_set_channel_layout(s->avctx);
    s->ch_mode = fi.ch_mode;

    if (!s->bps && !fi.bps) {
        av_log(s->avctx, AV_LOG_ERROR, "bps not found in STREAMINFO or frame header\n");
        return AVERROR_INVALIDDATA;
    }
    if (!fi.bps) {
        fi.bps = s->bps;
    } else if (s->bps && fi.bps != s->bps) {
        av_log(s->avctx, AV_LOG_ERROR, "switching bps mid-stream is not "
                                       "supported\n");
        return AVERROR_INVALIDDATA;
    }

    if (!s->bps) {
        s->bps = s->avctx->bits_per_raw_sample = fi.bps;
        flac_set_bps(s);
    }

    if (!s->max_blocksize)
        s->max_blocksize = FLAC_MAX_BLOCKSIZE;
    if (fi.blocksize > s->max_blocksize) {
        av_log(s->avctx, AV_LOG_ERROR, "blocksize %d > %d\n", fi.blocksize,
               s->max_blocksize);
        return AVERROR_INVALIDDATA;
    }
    s->blocksize = fi.blocksize;

    if (!s->samplerate && !fi.samplerate) {
        av_log(s->avctx, AV_LOG_ERROR, "sample rate not found in STREAMINFO"
                                        " or frame header\n");
        return AVERROR_INVALIDDATA;
    }
    if (fi.samplerate == 0)
        fi.samplerate = s->samplerate;
    s->samplerate = s->avctx->sample_rate = fi.samplerate;

    if (!s->got_streaminfo) {
        ret = allocate_buffers(s);
        if (ret < 0)
            return ret;
        ff_flacdsp_init(&s->dsp, s->avctx->sample_fmt, s->bps);
        s->got_streaminfo = 1;
        dump_headers(s->avctx, (FLACStreaminfo *)s);
    }

//    dump_headers(s->avctx, (FLACStreaminfo *)s);

    /* subframes */
    for (i = 0; i < s->channels; i++) {
        if ((ret = decode_subframe(s, i)) < 0)
            return ret;
    }

    align_get_bits(gb);

    /* frame footer */
    skip_bits(gb, 16); /* data crc */

    return 0;
}
Example 16
int ff_h264_decode_sei(H264Context *h)
{
    while (get_bits_left(&h->gb) > 16) {
        int size = 0;
        int type = 0;
        int ret  = 0;
        int last = 0;

        while (get_bits_left(&h->gb) >= 8 &&
               (last = get_bits(&h->gb, 8)) == 255) {
            type += 255;
        }
        type += last;

        last = 0;
        while (get_bits_left(&h->gb) >= 8 &&
               (last = get_bits(&h->gb, 8)) == 255) {
            size += 255;
        }
        size += last;

        if (size > get_bits_left(&h->gb) / 8) {
            av_log(h->avctx, AV_LOG_ERROR, "SEI type %d truncated at %d\n",
                   type, get_bits_left(&h->gb));
            return AVERROR_INVALIDDATA;
        }

        switch (type) {
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            ret = decode_picture_timing(h);
            break;
        case SEI_TYPE_USER_DATA_REGISTERED:
            ret = decode_registered_user_data(h, size);
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            ret = decode_unregistered_user_data(h, size);
            break;
        case SEI_TYPE_RECOVERY_POINT:
            ret = decode_recovery_point(h);
            break;
        case SEI_TYPE_BUFFERING_PERIOD:
            ret = decode_buffering_period(h);
            break;
        case SEI_TYPE_FRAME_PACKING:
            ret = decode_frame_packing_arrangement(h);
            break;
        case SEI_TYPE_DISPLAY_ORIENTATION:
            ret = decode_display_orientation(h);
            break;
        default:
            av_log(h->avctx, AV_LOG_DEBUG, "unknown SEI type %d\n", type);
            skip_bits(&h->gb, 8 * size);
        }
        if (ret < 0)
            return ret;

        // FIXME check bits here
        align_get_bits(&h->gb);
    }

    return 0;
}
Example 17
int ff_h264_decode_sei(H264Context *h)
{
    while (get_bits_left(&h->gb) > 16) {
        int type = 0;
        unsigned size = 0;
        unsigned next;
        int ret  = 0;

        do {
            if (get_bits_left(&h->gb) < 8)
                return AVERROR_INVALIDDATA;
            type += show_bits(&h->gb, 8);
        } while (get_bits(&h->gb, 8) == 255);

        do {
            if (get_bits_left(&h->gb) < 8)
                return AVERROR_INVALIDDATA;
            size += show_bits(&h->gb, 8);
        } while (get_bits(&h->gb, 8) == 255);

        if (h->avctx->debug&FF_DEBUG_STARTCODE)
            av_log(h->avctx, AV_LOG_DEBUG, "SEI %d len:%d\n", type, size);

        if (size > get_bits_left(&h->gb) / 8) {
            av_log(h->avctx, AV_LOG_ERROR, "SEI type %d size %d truncated at %d\n",
                   type, 8*size, get_bits_left(&h->gb));
            return AVERROR_INVALIDDATA;
        }
        next = get_bits_count(&h->gb) + 8 * size;

        switch (type) {
        case SEI_TYPE_PIC_TIMING: // Picture timing SEI
            ret = decode_picture_timing(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_USER_DATA_ITU_T_T35:
            if (decode_user_data_itu_t_t35(h, size) < 0)
                return -1;
            break;
        case SEI_TYPE_USER_DATA_UNREGISTERED:
            ret = decode_unregistered_user_data(h, size);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_RECOVERY_POINT:
            ret = decode_recovery_point(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_BUFFERING_PERIOD:
            ret = decode_buffering_period(h);
            if (ret < 0)
                return ret;
            break;
        case SEI_TYPE_FRAME_PACKING:
            ret = decode_frame_packing_arrangement(h);
            if (ret < 0)
                return ret;
            break;
        default:
            av_log(h->avctx, AV_LOG_DEBUG, "unknown SEI type %d\n", type);
        }
        skip_bits_long(&h->gb, next - get_bits_count(&h->gb));

        // FIXME check bits here
        align_get_bits(&h->gb);
    }

    return 0;
}
Example 18
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size,
                        const uint8_t *buf, int buf_size) {
    AVSubtitle *sub = data;
    const uint8_t *buf_end = buf + buf_size;
    uint8_t *bitmap;
    int w, h, x, y, rlelen, i;
    GetBitContext gb;

    // check that at least header fits
    if (buf_size < 27 + 7 * 2 + 4 * 3) {
        av_log(avctx, AV_LOG_ERROR, "coded frame too small\n");
        return -1;
    }

    // read start and end time
    if (buf[0] != '[' || buf[13] != '-' || buf[26] != ']') {
        av_log(avctx, AV_LOG_ERROR, "invalid time code\n");
        return -1;
    }
    sub->start_display_time = parse_timecode(buf +  1);
    sub->end_display_time   = parse_timecode(buf + 14);
    buf += 27;

    // read header
    w = bytestream_get_le16(&buf);
    h = bytestream_get_le16(&buf);
    if (avcodec_check_dimensions(avctx, w, h) < 0)
        return -1;
    x = bytestream_get_le16(&buf);
    y = bytestream_get_le16(&buf);
    // skip bottom right position, it gives no new information
    bytestream_get_le16(&buf);
    bytestream_get_le16(&buf);
    rlelen = bytestream_get_le16(&buf);

    // allocate sub and set values
    if (!sub->rects) {
        sub->rects = av_mallocz(sizeof(AVSubtitleRect));
        sub->num_rects = 1;
    }
    av_freep(&sub->rects[0].bitmap);
    sub->rects[0].x = x; sub->rects[0].y = y;
    sub->rects[0].w = w; sub->rects[0].h = h;
    sub->rects[0].linesize = w;
    sub->rects[0].bitmap = av_malloc(w * h);
    sub->rects[0].nb_colors = 4;
    sub->rects[0].rgba_palette = av_malloc(sub->rects[0].nb_colors * 4);

    // read palette
    for (i = 0; i < sub->rects[0].nb_colors; i++)
        sub->rects[0].rgba_palette[i] = bytestream_get_be24(&buf);
    // make all except background (first entry) non-transparent
    for (i = 1; i < sub->rects[0].nb_colors; i++)
        sub->rects[0].rgba_palette[i] |= 0xff000000;

    // process RLE-compressed data
    rlelen = FFMIN(rlelen, buf_end - buf);
    init_get_bits(&gb, buf, rlelen * 8);
    bitmap = sub->rects[0].bitmap;
    for (y = 0; y < h; y++) {
        // interlaced: do odd lines
        if (y == (h + 1) / 2) bitmap = sub->rects[0].bitmap + w;
        for (x = 0; x < w; ) {
            int log2 = ff_log2_tab[show_bits(&gb, 8)];
            int run = get_bits(&gb, 14 - 4 * (log2 >> 1));
            int color = get_bits(&gb, 2);
            run = FFMIN(run, w - x);
            // run length 0 means till end of row
            if (!run) run = w - x;
            memset(bitmap, color, run);
            bitmap += run;
            x += run;
        }
        // interlaced, skip every second line
        bitmap += w;
        align_get_bits(&gb);
    }
    *data_size = 1;
    return buf_size;
}
Example 19
static int
avs_decode_frame(AVCodecContext * avctx,
                 void *data, int *data_size, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    AvsContext *const avs = avctx->priv_data;
    AVFrame *picture = data;
    AVFrame *const p = (AVFrame *) & avs->picture;
    const uint8_t *table, *vect;
    uint8_t *out;
    int i, j, x, y, stride, vect_w = 3, vect_h = 3;
    AvsVideoSubType sub_type;
    AvsBlockType type;
    GetBitContext change_map;

    if (avctx->reget_buffer(avctx, p)) {
        av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
        return -1;
    }
    p->reference = 1;
    p->pict_type = FF_P_TYPE;
    p->key_frame = 0;

    out = avs->picture.data[0];
    stride = avs->picture.linesize[0];

    sub_type = buf[0];
    type = buf[1];
    buf += 4;

    if (type == AVS_PALETTE) {
        int first, last;
        uint32_t *pal = (uint32_t *) avs->picture.data[1];

        first = AV_RL16(buf);
        last = first + AV_RL16(buf + 2);
        buf += 4;
        for (i=first; i<last; i++, buf+=3)
            pal[i] = (buf[0] << 18) | (buf[1] << 10) | (buf[2] << 2);

        sub_type = buf[0];
        type = buf[1];
        buf += 4;
    }

    if (type != AVS_VIDEO)
        return -1;

    switch (sub_type) {
    case AVS_I_FRAME:
        p->pict_type = FF_I_TYPE;
        p->key_frame = 1;
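        /* fall through: I frames also use 3x3 vectors */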
    case AVS_P_FRAME_3X3:
        vect_w = 3;
        vect_h = 3;
        break;

    case AVS_P_FRAME_2X2:
        vect_w = 2;
        vect_h = 2;
        break;

    case AVS_P_FRAME_2X3:
        vect_w = 2;
        vect_h = 3;
        break;

    default:
      return -1;
    }

    table = buf + (256 * vect_w * vect_h);
    if (sub_type != AVS_I_FRAME) {
        int map_size = ((318 / vect_w + 7) / 8) * (198 / vect_h);
        init_get_bits(&change_map, table, map_size * 8); /* size is in bits */
        table += map_size;
    }

    for (y=0; y<198; y+=vect_h) {
        for (x=0; x<318; x+=vect_w) {
            if (sub_type == AVS_I_FRAME || get_bits1(&change_map)) {
                vect = &buf[*table++ * (vect_w * vect_h)];
                for (j=0; j<vect_w; j++) {
                    out[(y + 0) * stride + x + j] = vect[(0 * vect_w) + j];
                    out[(y + 1) * stride + x + j] = vect[(1 * vect_w) + j];
                    if (vect_h == 3)
                        out[(y + 2) * stride + x + j] =
                            vect[(2 * vect_w) + j];
                }
            }
        }
        if (sub_type != AVS_I_FRAME)
            align_get_bits(&change_map);
    }

    *picture = *(AVFrame *) & avs->picture;
    *data_size = sizeof(AVPicture);

    return buf_size;
}
Example 20
static int decode_frame(AVCodecContext *avctx, void *data, int *data_size,
                        AVPacket *avpkt) {
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    AVSubtitle *sub = data;
    const uint8_t *buf_end = buf + buf_size;
    uint8_t *bitmap;
    int w, h, x, y, rlelen, i;
    int64_t packet_time = 0;
    GetBitContext gb;

    memset(sub, 0, sizeof(*sub));

    // check that at least header fits
    if (buf_size < 27 + 7 * 2 + 4 * 3) {
        av_log(avctx, AV_LOG_ERROR, "coded frame too small\n");
        return -1;
    }

    // read start and end time
    if (buf[0] != '[' || buf[13] != '-' || buf[26] != ']') {
        av_log(avctx, AV_LOG_ERROR, "invalid time code\n");
        return -1;
    }
    if (avpkt->pts != AV_NOPTS_VALUE)
        packet_time = av_rescale_q(avpkt->pts, AV_TIME_BASE_Q, (AVRational){1, 1000});
    sub->start_display_time = parse_timecode(buf +  1, packet_time);
    sub->end_display_time   = parse_timecode(buf + 14, packet_time);
    buf += 27;

    // read header
    w = bytestream_get_le16(&buf);
    h = bytestream_get_le16(&buf);
    if (avcodec_check_dimensions(avctx, w, h) < 0)
        return -1;
    x = bytestream_get_le16(&buf);
    y = bytestream_get_le16(&buf);

#ifdef SUPPORT_DIVX_DRM
    if ((video_height - (y + h)) > 30)
        y = video_height - 30 - h - 1;
#endif /* end of SUPPORT_DIVX_DRM */

    // skip bottom right position, it gives no new information
    bytestream_get_le16(&buf);
    bytestream_get_le16(&buf);
    rlelen = bytestream_get_le16(&buf);

    // allocate sub and set values
    sub->rects =  av_mallocz(sizeof(*sub->rects));
    sub->rects[0] = av_mallocz(sizeof(*sub->rects[0]));
    sub->num_rects = 1;
    sub->rects[0]->x = x; sub->rects[0]->y = y;
    sub->rects[0]->w = w; sub->rects[0]->h = h;
    sub->rects[0]->type = SUBTITLE_BITMAP;
    sub->rects[0]->pict.linesize[0] = w;
    sub->rects[0]->pict.data[0] = av_malloc(w * h);
    sub->rects[0]->nb_colors = 4;
    sub->rects[0]->pict.data[1] = av_mallocz(AVPALETTE_SIZE);

    // read palette
    for (i = 0; i < sub->rects[0]->nb_colors; i++)
        ((uint32_t*)sub->rects[0]->pict.data[1])[i] = bytestream_get_be24(&buf);
    // make all except background (first entry) non-transparent
#if 1
    if (sub_type == 2) { // DXSA
        for (i = 0; i < sub->rects[0]->nb_colors; i++) {
            if (buf[i])
                ((uint32_t*)sub->rects[0]->pict.data[1])[i] |= 0xff000000;
        }
        if (buf[0] == buf[1] && buf[0] == buf[2] && buf[0] == buf[3] &&
            buf[1] == buf[2] && buf[1] == buf[3] && buf[2] == buf[3] &&
            buf[0] < 0xff)
            transport_float = (float)buf[0] / 256.0;

        buf += 4;
    } else {
        for (i = 1; i < sub->rects[0]->nb_colors; i++)
            ((uint32_t*)sub->rects[0]->pict.data[1])[i] |= 0xff000000;
    }
#else
    for (i = 1; i < sub->rects[0]->nb_colors; i++)
        ((uint32_t*)sub->rects[0]->pict.data[1])[i] |= 0xff000000;
#endif

    // process RLE-compressed data
    rlelen = FFMIN(rlelen, buf_end - buf);
    init_get_bits(&gb, buf, rlelen * 8);
    bitmap = sub->rects[0]->pict.data[0];
    for (y = 0; y < h; y++) {
        // interlaced: do odd lines
        if (y == (h + 1) / 2) bitmap = sub->rects[0]->pict.data[0] + w;
        for (x = 0; x < w; ) {
            int log2 = ff_log2_tab[show_bits(&gb, 8)];
            int run = get_bits(&gb, 14 - 4 * (log2 >> 1));
            int color = get_bits(&gb, 2);
            run = FFMIN(run, w - x);
            // run length 0 means till end of row
            if (!run) run = w - x;
            memset(bitmap, color, run);
            bitmap += run;
            x += run;
        }
        // interlaced, skip every second line
        bitmap += w;
        align_get_bits(&gb);
    }
    *data_size = 1;
    return buf_size;
}