Example #1
av_cold int ff_vp56_init_context(AVCodecContext *avctx, VP56Context *s,
                                  int flip, int has_alpha)
{
    int i;

    s->avctx = avctx;
    avctx->pix_fmt = has_alpha ? AV_PIX_FMT_YUVA420P : AV_PIX_FMT_YUV420P;
    if (avctx->skip_alpha) avctx->pix_fmt = AV_PIX_FMT_YUV420P;

    ff_h264chroma_init(&s->h264chroma, 8);
    ff_hpeldsp_init(&s->hdsp, avctx->flags);
    ff_videodsp_init(&s->vdsp, 8);
    ff_vp3dsp_init(&s->vp3dsp, avctx->flags);
    ff_vp56dsp_init(&s->vp56dsp, avctx->codec->id);
    /* Build the IDCT scan table as a transposed zigzag order
     * (swap the row and column of each coefficient index). */
    for (i = 0; i < 64; i++) {
#define T(x) (x >> 3) | ((x & 7) << 3)
        s->idct_scantable[i] = T(ff_zigzag_direct[i]);
#undef T
    }

    /* Allocate the frame pool; on failure, ff_vp56_free() releases any
     * frames already allocated before the error is returned. */
    for (i = 0; i < FF_ARRAY_ELEMS(s->frames); i++) {
        s->frames[i] = av_frame_alloc();
        if (!s->frames[i]) {
            ff_vp56_free(avctx);
            return AVERROR(ENOMEM);
        }
    }
    s->edge_emu_buffer_alloc = NULL;

    s->above_blocks = NULL;
    s->macroblocks = NULL;
    s->quantizer = -1;
    s->deblock_filtering = 1;
    s->golden_frame = 0;

    s->filter = NULL;

    s->has_alpha = has_alpha;

    s->modelp = &s->model;

    if (flip) {
        /* Negative flip makes the decoder walk the picture bottom-up. */
        s->flip = -1;
        s->frbi = 2;
        s->srbi = 0;
    } else {
        s->flip = 1;
        s->frbi = 0;
        s->srbi = 2;
    }

    return 0;
}
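In FFmpeg, ff_vp56_init_context() is normally called from a VP5/VP6 decoder's init callback with codec-specific flip and alpha flags. A minimal sketch of such a caller, assuming the decoder's private context is a plain VP56Context and the FFmpeg-internal headers are available (the callback name and the flag values are illustrative, not taken from the example above):

#include "avcodec.h"
#include "vp56.h"

/* Hypothetical wrapper around ff_vp56_init_context(); the flip/has_alpha
 * values are illustrative and would normally be derived from the codec ID
 * (e.g. raw VP6 vs. VP6F vs. VP6A). */
static av_cold int example_vp6_decode_init(AVCodecContext *avctx)
{
    VP56Context *s = avctx->priv_data;  /* assumes priv_data_size == sizeof(VP56Context) */

    /* On allocation failure the helper has already freed what it set up,
     * so its error code can be returned directly. */
    return ff_vp56_init_context(avctx, s, /*flip=*/1, /*has_alpha=*/0);
}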
Example #2
av_cold void ff_rv30dsp_init(RV34DSPContext *c)
{
    H264ChromaContext h264chroma;
    H264QpelContext qpel;

    ff_rv34dsp_init(c);
    /* Temporary H.264 chroma/qpel contexts are initialized on the stack only
     * to harvest their fullpel and chroma MC function pointers, which RV30
     * shares with H.264. */
    ff_h264chroma_init(&h264chroma, 8);
    ff_h264qpel_init(&qpel, 8);

    /* Luma tables are indexed by third-pel offset: index = mx + 4 * my. */
    c->put_pixels_tab[0][ 0] = qpel.put_h264_qpel_pixels_tab[0][0];
    c->put_pixels_tab[0][ 1] = put_rv30_tpel16_mc10_c;
    c->put_pixels_tab[0][ 2] = put_rv30_tpel16_mc20_c;
    c->put_pixels_tab[0][ 4] = put_rv30_tpel16_mc01_c;
    c->put_pixels_tab[0][ 5] = put_rv30_tpel16_mc11_c;
    c->put_pixels_tab[0][ 6] = put_rv30_tpel16_mc21_c;
    c->put_pixels_tab[0][ 8] = put_rv30_tpel16_mc02_c;
    c->put_pixels_tab[0][ 9] = put_rv30_tpel16_mc12_c;
    c->put_pixels_tab[0][10] = put_rv30_tpel16_mc22_c;
    c->avg_pixels_tab[0][ 0] = qpel.avg_h264_qpel_pixels_tab[0][0];
    c->avg_pixels_tab[0][ 1] = avg_rv30_tpel16_mc10_c;
    c->avg_pixels_tab[0][ 2] = avg_rv30_tpel16_mc20_c;
    c->avg_pixels_tab[0][ 4] = avg_rv30_tpel16_mc01_c;
    c->avg_pixels_tab[0][ 5] = avg_rv30_tpel16_mc11_c;
    c->avg_pixels_tab[0][ 6] = avg_rv30_tpel16_mc21_c;
    c->avg_pixels_tab[0][ 8] = avg_rv30_tpel16_mc02_c;
    c->avg_pixels_tab[0][ 9] = avg_rv30_tpel16_mc12_c;
    c->avg_pixels_tab[0][10] = avg_rv30_tpel16_mc22_c;
    c->put_pixels_tab[1][ 0] = qpel.put_h264_qpel_pixels_tab[1][0];
    c->put_pixels_tab[1][ 1] = put_rv30_tpel8_mc10_c;
    c->put_pixels_tab[1][ 2] = put_rv30_tpel8_mc20_c;
    c->put_pixels_tab[1][ 4] = put_rv30_tpel8_mc01_c;
    c->put_pixels_tab[1][ 5] = put_rv30_tpel8_mc11_c;
    c->put_pixels_tab[1][ 6] = put_rv30_tpel8_mc21_c;
    c->put_pixels_tab[1][ 8] = put_rv30_tpel8_mc02_c;
    c->put_pixels_tab[1][ 9] = put_rv30_tpel8_mc12_c;
    c->put_pixels_tab[1][10] = put_rv30_tpel8_mc22_c;
    c->avg_pixels_tab[1][ 0] = qpel.avg_h264_qpel_pixels_tab[1][0];
    c->avg_pixels_tab[1][ 1] = avg_rv30_tpel8_mc10_c;
    c->avg_pixels_tab[1][ 2] = avg_rv30_tpel8_mc20_c;
    c->avg_pixels_tab[1][ 4] = avg_rv30_tpel8_mc01_c;
    c->avg_pixels_tab[1][ 5] = avg_rv30_tpel8_mc11_c;
    c->avg_pixels_tab[1][ 6] = avg_rv30_tpel8_mc21_c;
    c->avg_pixels_tab[1][ 8] = avg_rv30_tpel8_mc02_c;
    c->avg_pixels_tab[1][ 9] = avg_rv30_tpel8_mc12_c;
    c->avg_pixels_tab[1][10] = avg_rv30_tpel8_mc22_c;

    c->put_chroma_pixels_tab[0] = h264chroma.put_h264_chroma_pixels_tab[0];
    c->put_chroma_pixels_tab[1] = h264chroma.put_h264_chroma_pixels_tab[1];
    c->avg_chroma_pixels_tab[0] = h264chroma.avg_h264_chroma_pixels_tab[0];
    c->avg_chroma_pixels_tab[1] = h264chroma.avg_h264_chroma_pixels_tab[1];
}
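ff_rv30dsp_init() fills the tables that the shared RV30/RV40 motion-compensation code indexes by block size and third-pel offset. A minimal sketch of that lookup, assuming the FFmpeg-internal header is available (function and variable names are illustrative, and the exact stride type of qpel_mc_func varies between FFmpeg versions):

#include "rv34dsp.h"

/* Illustrative dispatch: copy one 16x16 luma block at third-pel offset
 * (mx, my), each in 0..2; the table index is mx + 4 * my. */
static void example_rv30_mc_luma16(RV34DSPContext *dsp,
                                   uint8_t *dst, uint8_t *src, int stride,
                                   int mx, int my)
{
    dsp->put_pixels_tab[0][mx + 4 * my](dst, src, stride);
}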
Example #3
av_cold int ff_vp56_init_context(AVCodecContext *avctx, VP56Context *s,
                                  int flip, int has_alpha)
{
    int i;

    s->avctx = avctx;
    avctx->pix_fmt = has_alpha ? AV_PIX_FMT_YUVA420P : AV_PIX_FMT_YUV420P;

    ff_dsputil_init(&s->dsp, avctx);
    ff_h264chroma_init(&s->h264chroma, 8);
    ff_videodsp_init(&s->vdsp, 8);
    ff_vp3dsp_init(&s->vp3dsp, avctx->flags);
    ff_vp56dsp_init(&s->vp56dsp, avctx->codec->id);
    /* Derive the coefficient scan order from the VP3 IDCT permutation. */
    ff_init_scantable_permutation(s->dsp.idct_permutation, s->vp3dsp.idct_perm);
    ff_init_scantable(s->dsp.idct_permutation, &s->scantable, ff_zigzag_direct);

    for (i = 0; i < FF_ARRAY_ELEMS(s->frames); i++) {
        s->frames[i] = av_frame_alloc();
        if (!s->frames[i]) {
            ff_vp56_free(avctx);
            return AVERROR(ENOMEM);
        }
    }
    s->edge_emu_buffer_alloc = NULL;

    s->above_blocks = NULL;
    s->macroblocks = NULL;
    s->quantizer = -1;
    s->deblock_filtering = 1;
    s->golden_frame = 0;

    s->filter = NULL;

    s->has_alpha = has_alpha;

    s->modelp = &s->model;

    if (flip) {
        s->flip = -1;
        s->frbi = 2;
        s->srbi = 0;
    } else {
        s->flip = 1;
        s->frbi = 0;
        s->srbi = 2;
    }

    return 0;
}