Example No. 1
static int check_outfmt(vf_instance_t *vf, int outfmt)
{
    enum AVPixelFormat pixfmt = imgfmt2pixfmt(outfmt);
    if (pixfmt == AV_PIX_FMT_NONE || sws_isSupportedOutput(pixfmt) < 1)
        return 0;
    return vf_next_query_format(vf, outfmt);
}
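All of these examples revolve around imgfmt2pixfmt(), which maps an mpv/MPlayer IMGFMT_* value to libavutil's enum AVPixelFormat and yields AV_PIX_FMT_NONE when no equivalent exists (pixfmt2imgfmt() goes the other way, as in Example No. 11). As a rough, hypothetical sketch only, not the projects' actual implementation, such a mapping boils down to a small lookup table:

#include <libavutil/pixfmt.h>
#include "video/img_format.h"   /* assumed mpv header providing IMGFMT_* */

/* Illustrative subset of an IMGFMT <-> AVPixelFormat table; the real mapping
 * in mpv (video/fmt-conversion.c) covers far more formats. */
struct fmt_map { int imgfmt; enum AVPixelFormat pixfmt; };

static const struct fmt_map example_map[] = {
    { IMGFMT_420P,  AV_PIX_FMT_YUV420P },
    { IMGFMT_RGB24, AV_PIX_FMT_RGB24   },
    { 0,            AV_PIX_FMT_NONE    },
};

static enum AVPixelFormat example_imgfmt2pixfmt(int imgfmt)
{
    for (int i = 0; example_map[i].imgfmt; i++) {
        if (example_map[i].imgfmt == imgfmt)
            return example_map[i].pixfmt;
    }
    return AV_PIX_FMT_NONE;   /* callers test for this, as in Example No. 1 */
}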
Example No. 2
static int
config(uint32_t width, uint32_t height, uint32_t d_width, uint32_t d_height, uint32_t flags, char *title, uint32_t format)
{
    if (z_compression == 0) {
        mp_tmsg(MSGT_VO,MSGL_INFO, "[VO_PNG] Warning: compression level set to 0, compression disabled!\n");
        mp_tmsg(MSGT_VO,MSGL_INFO, "[VO_PNG] Info: Use -vo png:z=<n> to set compression level from 0 to 9.\n");
        mp_tmsg(MSGT_VO,MSGL_INFO, "[VO_PNG] Info: (0 = no compression, 1 = fastest, lowest - 9 best, slowest compression)\n");
    }

    mp_msg(MSGT_VO,MSGL_DBG2, "PNG Compression level %i\n", z_compression);
    uninit();
    struct AVCodec *png_codec = avcodec_find_encoder(AV_CODEC_ID_PNG);
    if (!png_codec)
        goto error;
    avctx = avcodec_alloc_context3(png_codec);
    if (!avctx)
        goto error;
    avctx->width = width;
    avctx->height = height;
    avctx->pix_fmt = imgfmt2pixfmt(format);
    avctx->compression_level = z_compression;
    if (avcodec_open2(avctx, png_codec, NULL) < 0)
        goto error;
    return 0;

 error:
    uninit();
    return -1;
}
Example No. 3
static int write_lavc(struct image_writer_ctx *ctx, mp_image_t *image, FILE *fp)
{
    int success = 0;
    AVFrame *pic = NULL;
    AVPacket pkt = {0};
    int got_output = 0;

    av_init_packet(&pkt);

    struct AVCodec *codec = avcodec_find_encoder(ctx->writer->lavc_codec);
    AVCodecContext *avctx = NULL;
    if (!codec)
        goto print_open_fail;
    avctx = avcodec_alloc_context3(codec);
    if (!avctx)
        goto print_open_fail;

    avctx->time_base = AV_TIME_BASE_Q;
    avctx->width = image->w;
    avctx->height = image->h;
    avctx->pix_fmt = imgfmt2pixfmt(image->imgfmt);
    if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
        MP_ERR(ctx, "Image format %s not supported by lavc.\n",
               mp_imgfmt_to_name(image->imgfmt));
        goto error_exit;
    }
    if (ctx->writer->lavc_codec == AV_CODEC_ID_PNG) {
        avctx->compression_level = ctx->opts->png_compression;
        avctx->prediction_method = ctx->opts->png_filter;
    }

    if (avcodec_open2(avctx, codec, NULL) < 0) {
     print_open_fail:
        MP_ERR(ctx, "Could not open libavcodec encoder for saving images\n");
        goto error_exit;
    }

    pic = av_frame_alloc();
    if (!pic)
        goto error_exit;
    for (int n = 0; n < 4; n++) {
        pic->data[n] = image->planes[n];
        pic->linesize[n] = image->stride[n];
    }
    int ret = avcodec_encode_video2(avctx, &pkt, pic, &got_output);
    if (ret < 0)
        goto error_exit;

    fwrite(pkt.data, pkt.size, 1, fp);

    success = !!got_output;
error_exit:
    if (avctx)
        avcodec_close(avctx);
    av_free(avctx);
    av_frame_free(&pic);
    av_free_packet(&pkt);
    return success;
}
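Several of the snippets here (Examples No. 3, 5, 6, 7 and 10) use avcodec_encode_video()/avcodec_encode_video2(), which FFmpeg later deprecated and eventually removed. Purely as a hedged sketch of the same one-frame write against the newer send/receive API (not code from mpv itself; avctx and pic are assumed to be set up as in the example above):

#include <stdio.h>
#include <libavcodec/avcodec.h>

/* Sketch: encode a single AVFrame with avcodec_send_frame()/
 * avcodec_receive_packet() and write every produced packet to fp. */
static int encode_one_frame(AVCodecContext *avctx, AVFrame *pic, FILE *fp)
{
    AVPacket *pkt = av_packet_alloc();
    if (!pkt)
        return AVERROR(ENOMEM);

    int ret = avcodec_send_frame(avctx, pic);
    if (ret >= 0)
        ret = avcodec_send_frame(avctx, NULL);   /* NULL frame enters draining mode */

    while (ret >= 0) {
        ret = avcodec_receive_packet(avctx, pkt);
        if (ret < 0)
            break;                               /* AVERROR_EOF ends draining */
        fwrite(pkt->data, pkt->size, 1, fp);
        av_packet_unref(pkt);
    }

    av_packet_free(&pkt);
    return ret == AVERROR_EOF ? 0 : ret;
}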
Example No. 4
static int query_format(struct vf_instance *vf, unsigned int fmt)
{
    if (!IMGFMT_IS_HWACCEL(fmt) && imgfmt2pixfmt(fmt) != AV_PIX_FMT_NONE) {
        if (sws_isSupportedInput(imgfmt2pixfmt(fmt)) < 1)
            return 0;
        unsigned int best = find_best_out(vf, fmt);
        int flags;
        if (!best)
            return 0;            // no matching out-fmt
        flags = vf_next_query_format(vf, best);
        if (!(flags & (VFCAP_CSP_SUPPORTED | VFCAP_CSP_SUPPORTED_BY_HW)))
            return 0;
        if (fmt != best)
            flags &= ~VFCAP_CSP_SUPPORTED_BY_HW;
        return flags;
    }
    return 0;   // no matching in-fmt
}
Example No. 5
static int write_lavc(struct image_writer_ctx *ctx, mp_image_t *image, FILE *fp)
{
    void *outbuffer = NULL;
    int success = 0;
    AVFrame *pic = NULL;

    struct AVCodec *codec = avcodec_find_encoder(ctx->writer->lavc_codec);
    AVCodecContext *avctx = NULL;
    if (!codec)
        goto print_open_fail;
    avctx = avcodec_alloc_context3(codec);
    if (!avctx)
        goto print_open_fail;

    avctx->time_base = AV_TIME_BASE_Q;
    avctx->width = image->w;
    avctx->height = image->h;
    avctx->pix_fmt = imgfmt2pixfmt(image->imgfmt);
    if (ctx->writer->lavc_codec == CODEC_ID_PNG)
        avctx->compression_level = ctx->opts->png_compression;

    if (avcodec_open2(avctx, codec, NULL) < 0) {
     print_open_fail:
        mp_msg(MSGT_CPLAYER, MSGL_INFO, "Could not open libavcodec encoder"
               " for saving images\n");
        goto error_exit;
    }

    size_t outbuffer_size = image->w * image->h * 3 * 2;
    outbuffer = malloc(outbuffer_size);
    if (!outbuffer)
        goto error_exit;

    pic = avcodec_alloc_frame();
    if (!pic)
        goto error_exit;
    avcodec_get_frame_defaults(pic);
    for (int n = 0; n < 4; n++) {
        pic->data[n] = image->planes[n];
        pic->linesize[n] = image->stride[n];
    }
    int size = avcodec_encode_video(avctx, outbuffer, outbuffer_size, pic);
    if (size < 1)
        goto error_exit;

    fwrite(outbuffer, size, 1, fp);

    success = 1;
error_exit:
    if (avctx)
        avcodec_close(avctx);
    av_free(avctx);
    avcodec_free_frame(&pic);
    free(outbuffer);
    return success;
}
Example No. 6
static int write_lavc(struct image_writer_ctx *ctx, mp_image_t *image, FILE *fp)
{
    int success = 0;
    AVFrame *pic = NULL;
    AVPacket pkt = {0};
    int got_output = 0;

    av_init_packet(&pkt);

    struct AVCodec *codec = avcodec_find_encoder(ctx->writer->lavc_codec);
    AVCodecContext *avctx = NULL;
    if (!codec)
        goto print_open_fail;
    avctx = avcodec_alloc_context3(codec);
    if (!avctx)
        goto print_open_fail;

    avctx->time_base = AV_TIME_BASE_Q;
    avctx->width = image->w;
    avctx->height = image->h;
    avctx->pix_fmt = imgfmt2pixfmt(image->imgfmt);
    if (ctx->writer->lavc_codec == AV_CODEC_ID_PNG)
        avctx->compression_level = ctx->opts->png_compression;

    if (avcodec_open2(avctx, codec, NULL) < 0) {
     print_open_fail:
        mp_msg(MSGT_CPLAYER, MSGL_INFO, "Could not open libavcodec encoder"
               " for saving images\n");
        goto error_exit;
    }

    pic = avcodec_alloc_frame();
    if (!pic)
        goto error_exit;
    avcodec_get_frame_defaults(pic);
    for (int n = 0; n < 4; n++) {
        pic->data[n] = image->planes[n];
        pic->linesize[n] = image->stride[n];
    }
    int ret = avcodec_encode_video2(avctx, &pkt, pic, &got_output);
    if (ret < 0)
        goto error_exit;

    fwrite(pkt.data, pkt.size, 1, fp);

    success = !!got_output;
error_exit:
    if (avctx)
        avcodec_close(avctx);
    av_free(avctx);
    avcodec_free_frame(&pic);
    av_free_packet(&pkt);
    return success;
}
Example No. 7
static uint32_t draw_image(mp_image_t* mpi){
    AVFrame *pic;
    int buffersize;
    int res, got_pkt;
    char buf[100];
    FILE *outfile;
    AVPacket pkt;

    // if -dr or -slices then do nothing:
    if(mpi->flags&(MP_IMGFLAG_DIRECT|MP_IMGFLAG_DRAW_CALLBACK)) return VO_TRUE;

    snprintf (buf, 100, "%s/%s%08d.png", png_outdir, png_outfile_prefix, ++framenum);
    outfile = fopen(buf, "wb");
    if (!outfile) {
        mp_msg(MSGT_VO,MSGL_WARN, MSGTR_LIBVO_PNG_ErrorOpeningForWriting, strerror(errno));
        return 1;
    }

    pic = av_frame_alloc();
    avctx->width = mpi->w;
    avctx->height = mpi->h;
    pic->width  = mpi->w;
    pic->height = mpi->h;
    pic->format = imgfmt2pixfmt(png_format);
    pic->data[0] = mpi->planes[0];
    pic->linesize[0] = mpi->stride[0];
    buffersize = mpi->w * mpi->h * 8;
    if (outbuffer_size < buffersize) {
        av_freep(&outbuffer);
        outbuffer = av_malloc(buffersize);
        outbuffer_size = buffersize;
    }
    av_init_packet(&pkt);
    pkt.data = outbuffer;
    pkt.size = outbuffer_size;
    res = avcodec_encode_video2(avctx, &pkt, pic, &got_pkt);
    av_frame_free(&pic);

    if (res < 0 || !got_pkt) {
        mp_msg(MSGT_VO,MSGL_WARN, MSGTR_LIBVO_PNG_ErrorInCreatePng);
    } else {
        fwrite(outbuffer, pkt.size, 1, outfile);
    }

    fclose(outfile);
    av_free_packet(&pkt);

    return VO_TRUE;
}
Example No. 8
QImage VideoFrame::toImage() const {
	if (!d->image.isNull())
		return d->image;
	if (!d->mpi || d->format.isEmpty())
		return QImage();
	SwsContext *sws = sws_getCachedContext(nullptr
		, d->format.width(), d->format.height(), imgfmt2pixfmt(d->format.type())
		, d->format.width(), d->format.height(), AV_PIX_FMT_RGB24, SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);
	QImage image(d->format.size(), QImage::Format_RGB888);
	const uchar *srcData[] = {data(0), data(1), data(2)};
	const int srcStride[] = {d->format.bytesPerLine(0), d->format.bytesPerLine(1), d->format.bytesPerLine(2)};
	uchar *destData[] = {image.bits()}; const int destStride[] = {image.bytesPerLine()};
	sws_scale(sws, srcData, srcStride, 0, d->format.height(), destData, destStride);
	sws_freeContext(sws);
	return image;
}
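A small caveat about the snippet above: sws_getCachedContext() returns NULL when libswscale cannot set up the requested conversion, so a more defensive toImage() would check the returned context (and hand back an empty QImage) before calling sws_scale().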
Example No. 9
static int find_best_out(vf_instance_t *vf, int in_format)
{
    int best = 0;
    for (int out_format = IMGFMT_START; out_format < IMGFMT_END; out_format++) {
        if (!vf_next_query_format(vf, out_format))
            continue;
        if (sws_isSupportedOutput(imgfmt2pixfmt(out_format)) < 1)
            continue;
        if (best) {
            int candidate = mp_imgfmt_select_best(best, out_format, in_format);
            if (candidate)
                best = candidate;
        } else {
            best = out_format;
        }
    }
    return best;
}
Example No. 10
static uint32_t draw_image(mp_image_t* mpi) {
    AVFrame pic;
    int buffersize;
    int res;
    char buf[100];
    FILE *outfile;

    // if -dr or -slices then do nothing:
    if(mpi->flags&(MP_IMGFLAG_DIRECT|MP_IMGFLAG_DRAW_CALLBACK)) return VO_TRUE;

    snprintf (buf, 100, "%s/%08d.png", png_outdir, ++framenum);
    outfile = fopen(buf, "wb");
    if (!outfile) {
        mp_msg(MSGT_VO,MSGL_WARN, MSGTR_LIBVO_PNG_ErrorOpeningForWriting, strerror(errno));
        return 1;
    }

    avctx->width = mpi->w;
    avctx->height = mpi->h;
    avctx->pix_fmt = imgfmt2pixfmt(mpi->imgfmt);
    pic.data[0] = mpi->planes[0];
    pic.linesize[0] = mpi->stride[0];
    buffersize = mpi->w * mpi->h * 8;
    if (outbuffer_size < buffersize) {
        av_freep(&outbuffer);
        outbuffer = av_malloc(buffersize);
        outbuffer_size = buffersize;
    }
    res = avcodec_encode_video(avctx, outbuffer, outbuffer_size, &pic);

    if(res < 0) {
        mp_msg(MSGT_VO,MSGL_WARN, MSGTR_LIBVO_PNG_ErrorInCreatePng);
        fclose(outfile);
        return 1;
    }

    fwrite(outbuffer, res, 1, outfile);
    fclose(outfile);

    return VO_TRUE;
}
Example No. 11
static int config(struct vf_instance *vf, int w, int h, int dw, int dh,
                        unsigned flags, unsigned fmt)
{
    int ret;
    AVFilterLink *out;
    AVRational iar, dar;

    av_reduce(&iar.num, &iar.den, w, h, INT_MAX);
    av_reduce(&dar.num, &dar.den, dw, dh, INT_MAX);
    vf->priv->in_pixfmt = imgfmt2pixfmt(fmt);
    vf->priv->in_imgfmt = fmt;
    vf->priv->in_w = w;
    vf->priv->in_h = h;
    vf->priv->in_sar = av_div_q(dar, iar);
    ret = avfilter_graph_config(vf->priv->graph, NULL);
    if (ret < 0)
        return 0;
    out = vf->priv->out->inputs[0];
    vf->priv->out_w = out->w;
    vf->priv->out_h = out->h;
    vf->priv->out_pixfmt = out->format;
    vf->priv->out_imgfmt = pixfmt2imgfmt(out->format);
    vf->priv->out_sar = out->sample_aspect_ratio;
    if (vf->priv->out_sar.num != vf->priv->in_sar.num ||
        vf->priv->out_sar.den != vf->priv->in_sar.den ||
        out->w != w || out->h != h) {
        av_reduce(&iar.num, &iar.den, out->w, out->h, INT_MAX);
        dar = av_mul_q(iar, out->sample_aspect_ratio);
        if (av_cmp_q(dar, iar) >= 0) {
            dh = out->h;
            dw = av_rescale(dh, dar.num, dar.den);
        } else {
            dw = out->w;
            dh = av_rescale(dw, dar.den, dar.num);
        }
    }
    return vf_next_config(vf, out->w, out->h, dw, dh, flags, fmt);
}
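To make the aspect-ratio math above concrete: for a 720x480 stream meant to be shown at 640x480 (4:3), iar reduces to 3/2 and dar to 4/3, so in_sar = av_div_q(dar, iar) = (4/3) / (3/2) = 8/9, the familiar narrow NTSC sample aspect ratio. After the graph is configured, the same relation is applied in reverse (dar = iar * sample_aspect_ratio) to recover a display size from the output link.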
Example No. 12
File: vo_x11.c Project: chyiz/mpv
static bool resize(struct vo *vo)
{
    struct priv *p = vo->priv;
    struct vo_x11_state *x11 = vo->x11;

    for (int i = 0; i < 2; i++)
        freeMyXImage(p, i);

    vo_get_src_dst_rects(vo, &p->src, &p->dst, &p->osd);

    p->src_w = p->src.x1 - p->src.x0;
    p->src_h = p->src.y1 - p->src.y0;
    p->dst_w = p->dst.x1 - p->dst.x0;
    p->dst_h = p->dst.y1 - p->dst.y0;

    // p->osd contains the parameters assuming OSD rendering in window
    // coordinates, but OSD can only be rendered in the intersection
    // between window and video rectangle (i.e. not into panscan borders).
    p->osd.w = p->dst_w;
    p->osd.h = p->dst_h;
    p->osd.mt = MPMIN(0, p->osd.mt);
    p->osd.mb = MPMIN(0, p->osd.mb);
    p->osd.mr = MPMIN(0, p->osd.mr);
    p->osd.ml = MPMIN(0, p->osd.ml);

    mp_input_set_mouse_transform(vo->input_ctx, &p->dst, NULL);

    p->image_width = (p->dst_w + 7) & (~7);
    p->image_height = p->dst_h;

    for (int i = 0; i < 2; i++) {
        if (!getMyXImage(p, i))
            return false;
    }

    const struct fmt_entry *fmte = mp_to_x_fmt;
    while (fmte->mpfmt) {
        if (fmte->depth == p->myximage[0]->bits_per_pixel &&
            fmte->byte_order == p->myximage[0]->byte_order &&
            fmte->red_mask == p->myximage[0]->red_mask &&
            fmte->green_mask == p->myximage[0]->green_mask &&
            fmte->blue_mask == p->myximage[0]->blue_mask)
            break;
        fmte++;
    }
    if (!fmte->mpfmt) {
        MP_ERR(vo, "X server image format not supported, use another VO.\n");
        return false;
    }

    mp_sws_set_from_cmdline(p->sws, vo->opts->sws_opts);
    p->sws->dst = (struct mp_image_params) {
        .imgfmt = fmte->mpfmt,
        .w = p->dst_w,
        .h = p->dst_h,
        .p_w = 1,
        .p_h = 1,
    };
    mp_image_params_guess_csp(&p->sws->dst);

    if (mp_sws_reinit(p->sws) < 0)
        return false;

    XFillRectangle(x11->display, x11->window, p->gc, 0, 0, vo->dwidth, vo->dheight);

    vo->want_redraw = true;
    return true;
}

static void Display_Image(struct priv *p, XImage *myximage)
{
    struct vo *vo = p->vo;

    XImage *x_image = p->myximage[p->current_buf];

#if HAVE_SHM && HAVE_XEXT
    if (p->Shmem_Flag) {
        XShmPutImage(vo->x11->display, vo->x11->window, p->gc, x_image,
                     0, 0, p->dst.x0, p->dst.y0, p->dst_w, p->dst_h,
                     True);
        vo->x11->ShmCompletionWaitCount++;
    } else
#endif
    {
        XPutImage(vo->x11->display, vo->x11->window, p->gc, x_image,
                  0, 0, p->dst.x0, p->dst.y0, p->dst_w, p->dst_h);
    }
}

static struct mp_image get_x_buffer(struct priv *p, int buf_index)
{
    struct mp_image img = {0};
    mp_image_set_params(&img, &p->sws->dst);

    img.planes[0] = p->myximage[buf_index]->data;
    img.stride[0] =
        p->image_width * ((p->myximage[buf_index]->bits_per_pixel + 7) / 8);

    return img;
}

static void wait_for_completion(struct vo *vo, int max_outstanding)
{
#if HAVE_SHM && HAVE_XEXT
    struct priv *ctx = vo->priv;
    struct vo_x11_state *x11 = vo->x11;
    if (ctx->Shmem_Flag) {
        while (x11->ShmCompletionWaitCount > max_outstanding) {
            if (!ctx->Shm_Warned_Slow) {
                MP_WARN(vo, "can't keep up! Waiting"
                            " for XShm completion events...\n");
                ctx->Shm_Warned_Slow = 1;
            }
            mp_sleep_us(1000);
            vo_x11_check_events(vo);
        }
    }
#endif
}

static void flip_page(struct vo *vo)
{
    struct priv *p = vo->priv;
    Display_Image(p, p->myximage[p->current_buf]);
    p->current_buf = (p->current_buf + 1) % 2;
}

// Note: REDRAW_FRAME can call this with NULL.
static void draw_image(struct vo *vo, mp_image_t *mpi)
{
    struct priv *p = vo->priv;

    wait_for_completion(vo, 1);

    struct mp_image img = get_x_buffer(p, p->current_buf);

    if (mpi) {
        struct mp_image src = *mpi;
        struct mp_rect src_rc = p->src;
        src_rc.x0 = MP_ALIGN_DOWN(src_rc.x0, src.fmt.align_x);
        src_rc.y0 = MP_ALIGN_DOWN(src_rc.y0, src.fmt.align_y);
        mp_image_crop_rc(&src, src_rc);

        mp_sws_scale(p->sws, &img, &src);
    } else {
        mp_image_clear(&img, 0, 0, img.w, img.h);
    }

    osd_draw_on_image(vo->osd, p->osd, mpi ? mpi->pts : 0, 0, &img);

    if (mpi != p->original_image) {
        talloc_free(p->original_image);
        p->original_image = mpi;
    }
}

static int query_format(struct vo *vo, int format)
{
    if (sws_isSupportedInput(imgfmt2pixfmt(format)))
        return 1;
    return 0;
}

static void uninit(struct vo *vo)
{
    struct priv *p = vo->priv;
    if (p->myximage[0])
        freeMyXImage(p, 0);
    if (p->myximage[1])
        freeMyXImage(p, 1);
    if (p->gc)
        XFreeGC(vo->x11->display, p->gc);

    talloc_free(p->original_image);

    vo_x11_uninit(vo);
}

static int preinit(struct vo *vo)
{
    struct priv *p = vo->priv;
    p->vo = vo;
    p->sws = mp_sws_alloc(vo);

    if (!vo_x11_init(vo))
        goto error;
    struct vo_x11_state *x11 = vo->x11;

    XWindowAttributes attribs;
    XGetWindowAttributes(x11->display, x11->rootwin, &attribs);
    p->depth = attribs.depth;

    if (!XMatchVisualInfo(x11->display, x11->screen, p->depth,
                          TrueColor, &p->vinfo))
        goto error;

    MP_VERBOSE(vo, "selected visual: %d\n", (int)p->vinfo.visualid);

    if (!vo_x11_create_vo_window(vo, &p->vinfo, "x11"))
        goto error;

    p->gc = XCreateGC(x11->display, x11->window, 0, NULL);
    MP_WARN(vo, "Warning: this legacy VO has bad performance. Consider fixing "
                "your graphics drivers, or not forcing the x11 VO.\n");
    return 0;

error:
    uninit(vo);
    return -1;
}

static int control(struct vo *vo, uint32_t request, void *data)
{
    struct priv *p = vo->priv;
    switch (request) {
    case VOCTRL_SET_PANSCAN:
        if (vo->config_ok)
            resize(vo);
        return VO_TRUE;
    case VOCTRL_REDRAW_FRAME:
        draw_image(vo, p->original_image);
        return true;
    }

    int events = 0;
    int r = vo_x11_control(vo, &events, request, data);
    if (vo->config_ok && (events & (VO_EVENT_EXPOSE | VO_EVENT_RESIZE)))
        resize(vo);
    vo_event(vo, events);
    return r;
}

const struct vo_driver video_out_x11 = {
    .description = "X11 (slow, old crap)",
    .name = "x11",
    .priv_size = sizeof(struct priv),
    .preinit = preinit,
    .query_format = query_format,
    .reconfig = reconfig,
    .control = control,
    .draw_image = draw_image,
    .flip_page = flip_page,
    .wakeup = vo_x11_wakeup,
    .wait_events = vo_x11_wait_events,
    .uninit = uninit,
};
Example No. 13
static void modeset_destroy_fb(int fd, struct modeset_buf *buf)
{
    if (buf->map) {
        munmap(buf->map, buf->size);
    }
    if (buf->fb) {
        drmModeRmFB(fd, buf->fb);
    }
    if (buf->handle) {
        struct drm_mode_destroy_dumb dreq = {
            .handle = buf->handle,
        };
        drmIoctl(fd, DRM_IOCTL_MODE_DESTROY_DUMB, &dreq);
    }
}

static int modeset_create_fb(struct vo *vo, int fd, struct modeset_buf *buf)
{
    int ret = 0;

    buf->handle = 0;

    // create dumb buffer
    struct drm_mode_create_dumb creq = {
        .width = buf->width,
        .height = buf->height,
        .bpp = 32,
    };
    ret = drmIoctl(fd, DRM_IOCTL_MODE_CREATE_DUMB, &creq);
    if (ret < 0) {
        MP_ERR(vo, "Cannot create dumb buffer: %s\n", mp_strerror(errno));
        ret = -errno;
        goto end;
    }
    buf->stride = creq.pitch;
    buf->size = creq.size;
    buf->handle = creq.handle;

    // create framebuffer object for the dumb-buffer
    ret = drmModeAddFB(fd, buf->width, buf->height, 24, 32, buf->stride,
                       buf->handle, &buf->fb);
    if (ret) {
        MP_ERR(vo, "Cannot create framebuffer: %s\n", mp_strerror(errno));
        ret = -errno;
        goto end;
    }

    // prepare buffer for memory mapping
    struct drm_mode_map_dumb mreq = {
        .handle = buf->handle,
    };
    ret = drmIoctl(fd, DRM_IOCTL_MODE_MAP_DUMB, &mreq);
    if (ret) {
        MP_ERR(vo, "Cannot map dumb buffer: %s\n", mp_strerror(errno));
        ret = -errno;
        goto end;
    }

    // perform actual memory mapping
    buf->map = mmap(0, buf->size, PROT_READ | PROT_WRITE, MAP_SHARED,
                    fd, mreq.offset);
    if (buf->map == MAP_FAILED) {
        MP_ERR(vo, "Cannot map dumb buffer: %s\n", mp_strerror(errno));
        ret = -errno;
        goto end;
    }

    memset(buf->map, 0, buf->size);

end:
    if (ret == 0) {
        return 0;
    }

    modeset_destroy_fb(fd, buf);
    return ret;
}

static int modeset_find_crtc(struct vo *vo, int fd, drmModeRes *res,
                             drmModeConnector *conn, struct modeset_dev *dev)
{
    for (unsigned int i = 0; i < conn->count_encoders; ++i) {
        drmModeEncoder *enc = drmModeGetEncoder(fd, conn->encoders[i]);
        if (!enc) {
            MP_WARN(vo, "Cannot retrieve encoder %u:%u: %s\n",
                    i, conn->encoders[i], mp_strerror(errno));
            continue;
        }

        // iterate all global CRTCs
        for (unsigned int j = 0; j < res->count_crtcs; ++j) {
            // check whether this CRTC works with the encoder
            if (!(enc->possible_crtcs & (1 << j)))
                continue;

            dev->enc = enc;
            dev->crtc = enc->crtc_id;
            return 0;
        }

        drmModeFreeEncoder(enc);
    }

    MP_ERR(vo, "Connector %u has no suitable CRTC\n", conn->connector_id);
    return -ENOENT;
}

static bool is_connector_valid(struct vo *vo, int conn_id,
                               drmModeConnector *conn, bool silent)
{
    if (!conn) {
        if (!silent) {
            MP_ERR(vo, "Cannot get connector %d: %s\n", conn_id,
                   mp_strerror(errno));
        }
        return false;
    }

    if (conn->connection != DRM_MODE_CONNECTED) {
        if (!silent) {
            MP_ERR(vo, "Connector %d is disconnected\n", conn_id);
        }
        return false;
    }

    if (conn->count_modes == 0) {
        if (!silent) {
            MP_ERR(vo, "Connector %d has no valid modes\n", conn_id);
        }
        return false;
    }

    return true;
}

static int modeset_prepare_dev(struct vo *vo, int fd, int conn_id,
                               struct modeset_dev **out)
{
    struct modeset_dev *dev = NULL;
    drmModeConnector *conn = NULL;

    int ret = 0;
    *out = NULL;

    drmModeRes *res = drmModeGetResources(fd);
    if (!res) {
        MP_ERR(vo, "Cannot retrieve DRM resources: %s\n", mp_strerror(errno));
        ret = -errno;
        goto end;
    }

    if (conn_id == -1) {
        // get the first connected connector
        for (int i = 0; i < res->count_connectors; i++) {
            conn = drmModeGetConnector(fd, res->connectors[i]);
            if (is_connector_valid(vo, i, conn, true)) {
                conn_id = i;
                break;
            }
            if (conn) {
                drmModeFreeConnector(conn);
                conn = NULL;
            }
        }
        if (conn_id == -1) {
            MP_ERR(vo, "No connected connectors found\n");
            ret = -ENODEV;
            goto end;
        }
    }

    if (conn_id < 0 || conn_id >= res->count_connectors) {
        MP_ERR(vo, "Bad connector ID. Max valid connector ID = %u\n",
               res->count_connectors);
        ret = -ENODEV;
        goto end;
    }

    conn = drmModeGetConnector(fd, res->connectors[conn_id]);
    if (!is_connector_valid(vo, conn_id, conn, false)) {
        ret = -ENODEV;
        goto end;
    }

    dev = talloc_zero(vo->priv, struct modeset_dev);
    dev->conn = conn->connector_id;
    dev->front_buf = 0;
    dev->mode = conn->modes[0];
    dev->bufs[0].width = conn->modes[0].hdisplay;
    dev->bufs[0].height = conn->modes[0].vdisplay;
    dev->bufs[1].width = conn->modes[0].hdisplay;
    dev->bufs[1].height = conn->modes[0].vdisplay;

    MP_INFO(vo, "Connector using mode %ux%u\n",
            dev->bufs[0].width, dev->bufs[0].height);

    ret = modeset_find_crtc(vo, fd, res, conn, dev);
    if (ret) {
        MP_ERR(vo, "Connector %d has no valid CRTC\n", conn_id);
        goto end;
    }

    for (unsigned int i = 0; i < BUF_COUNT; i++) {
        ret = modeset_create_fb(vo, fd, &dev->bufs[i]);
        if (ret) {
            MP_ERR(vo, "Cannot create framebuffer for connector %d\n",
                   conn_id);
            for (unsigned int j = 0; j < i; j++) {
                modeset_destroy_fb(fd, &dev->bufs[j]);
            }
            goto end;
        }
    }

end:
    if (conn) {
        drmModeFreeConnector(conn);
        conn = NULL;
    }
    if (res) {
        drmModeFreeResources(res);
        res = NULL;
    }
    if (ret == 0) {
        *out = dev;
    } else {
        talloc_free(dev);
    }
    return ret;
}

static void modeset_page_flipped(int fd, unsigned int frame, unsigned int sec,
                                 unsigned int usec, void *data)
{
    struct priv *p = data;
    p->pflip_happening = false;
}



static int setup_vo_crtc(struct vo *vo)
{
    struct priv *p = vo->priv;
    if (p->active)
        return 0;
    p->old_crtc = drmModeGetCrtc(p->fd, p->dev->crtc);
    int ret = drmModeSetCrtc(p->fd, p->dev->crtc,
                          p->dev->bufs[p->dev->front_buf + BUF_COUNT - 1].fb,
                          0, 0, &p->dev->conn, 1, &p->dev->mode);
    p->active = true;
    return ret;
}

static void release_vo_crtc(struct vo *vo)
{
    struct priv *p = vo->priv;

    if (!p->active)
        return;
    p->active = false;

    // wait for current page flip
    while (p->pflip_happening) {
        int ret = drmHandleEvent(p->fd, &p->ev);
        if (ret) {
            MP_ERR(vo, "drmHandleEvent failed: %i\n", ret);
            break;
        }
    }

    if (p->old_crtc) {
        drmModeSetCrtc(p->fd,
                       p->old_crtc->crtc_id,
                       p->old_crtc->buffer_id,
                       p->old_crtc->x,
                       p->old_crtc->y,
                       &p->dev->conn,
                       1,
                       &p->dev->mode);
        drmModeFreeCrtc(p->old_crtc);
        p->old_crtc = NULL;
    }
}

static void release_vt(void *data)
{
    struct vo *vo = data;
    release_vo_crtc(vo);
    if (USE_MASTER) {
        //this function enables support for switching to x, weston etc.
        //however, for whatever reason, it can be called only by root users.
        //until things change, this is commented.
        struct priv *p = vo->priv;
        if (drmDropMaster(p->fd)) {
            MP_WARN(vo, "Failed to drop DRM master: %s\n", mp_strerror(errno));
        }
    }
}

static void acquire_vt(void *data)
{
    struct vo *vo = data;
    if (USE_MASTER) {
        struct priv *p = vo->priv;
        if (drmSetMaster(p->fd)) {
            MP_WARN(vo, "Failed to acquire DRM master: %s\n", mp_strerror(errno));
        }
    }

    setup_vo_crtc(vo);
}



static int wait_events(struct vo *vo, int64_t until_time_us)
{
    struct priv *p = vo->priv;
    int64_t wait_us = until_time_us - mp_time_us();
    int timeout_ms = MPCLAMP((wait_us + 500) / 1000, 0, 10000);
    vt_switcher_poll(&p->vt_switcher, timeout_ms);
    return 0;
}

static void wakeup(struct vo *vo)
{
    struct priv *p = vo->priv;
    vt_switcher_interrupt_poll(&p->vt_switcher);
}

static int reconfig(struct vo *vo, struct mp_image_params *params, int flags)
{
    struct priv *p = vo->priv;

    vo->dwidth = p->device_w;
    vo->dheight = p->device_h;
    vo_get_src_dst_rects(vo, &p->src, &p->dst, &p->osd);

    int32_t w = p->dst.x1 - p->dst.x0;
    int32_t h = p->dst.y1 - p->dst.y0;

    // p->osd contains the parameters assuming OSD rendering in window
    // coordinates, but OSD can only be rendered in the intersection
    // between window and video rectangle (i.e. not into panscan borders).
    p->osd.w = w;
    p->osd.h = h;
    p->osd.mt = MPMIN(0, p->osd.mt);
    p->osd.mb = MPMIN(0, p->osd.mb);
    p->osd.mr = MPMIN(0, p->osd.mr);
    p->osd.ml = MPMIN(0, p->osd.ml);

    p->x = (p->device_w - w) >> 1;
    p->y = (p->device_h - h) >> 1;

    mp_sws_set_from_cmdline(p->sws, vo->opts->sws_opts);
    p->sws->src = *params;
    p->sws->dst = (struct mp_image_params) {
        .imgfmt = IMGFMT_BGR0,
        .w = w,
        .h = h,
        .d_w = w,
        .d_h = h,
    };

    talloc_free(p->cur_frame);
    p->cur_frame = mp_image_alloc(IMGFMT_BGR0, p->device_w, p->device_h);
    mp_image_params_guess_csp(&p->sws->dst);
    mp_image_set_params(p->cur_frame, &p->sws->dst);

    struct modeset_buf *buf = p->dev->bufs;
    memset(buf[0].map, 0, buf[0].size);
    memset(buf[1].map, 0, buf[1].size);

    if (mp_sws_reinit(p->sws) < 0)
        return -1;

    vo->want_redraw = true;
    return 0;
}

static void draw_image(struct vo *vo, mp_image_t *mpi)
{
    struct priv *p = vo->priv;

    if (p->active) {
        struct mp_image src = *mpi;
        struct mp_rect src_rc = p->src;
        src_rc.x0 = MP_ALIGN_DOWN(src_rc.x0, mpi->fmt.align_x);
        src_rc.y0 = MP_ALIGN_DOWN(src_rc.y0, mpi->fmt.align_y);
        mp_image_crop_rc(&src, src_rc);
        mp_sws_scale(p->sws, p->cur_frame, &src);
        osd_draw_on_image(vo->osd, p->osd, src.pts, 0, p->cur_frame);

        struct modeset_buf *front_buf = &p->dev->bufs[p->dev->front_buf];
        int32_t shift = (p->device_w * p->y + p->x) * 4;
        memcpy_pic(front_buf->map + shift,
                   p->cur_frame->planes[0],
                   (p->dst.x1 - p->dst.x0) * 4,
                   p->dst.y1 - p->dst.y0,
                   p->device_w * 4,
                   p->cur_frame->stride[0]);
    }

    if (mpi != p->last_input) {
        talloc_free(p->last_input);
        p->last_input = mpi;
    }
}

static void flip_page(struct vo *vo)
{
    struct priv *p = vo->priv;
    if (!p->active || p->pflip_happening)
        return;

    int ret = drmModePageFlip(p->fd, p->dev->crtc,
                              p->dev->bufs[p->dev->front_buf].fb,
                              DRM_MODE_PAGE_FLIP_EVENT, p);
    if (ret) {
        MP_WARN(vo, "Cannot flip page for connector\n");
    } else {
        p->dev->front_buf++;
        p->dev->front_buf %= BUF_COUNT;
        p->pflip_happening = true;
    }

    // poll page flip finish event
    const int timeout_ms = 3000;
    struct pollfd fds[1] = {
        { .events = POLLIN, .fd = p->fd },
    };
    poll(fds, 1, timeout_ms);
    if (fds[0].revents & POLLIN) {
        ret = drmHandleEvent(p->fd, &p->ev);
        if (ret != 0) {
            MP_ERR(vo, "drmHandleEvent failed: %i\n", ret);
            return;
        }
    }
}

static void uninit(struct vo *vo)
{
    struct priv *p = vo->priv;

    if (p->dev) {
        release_vo_crtc(vo);

        modeset_destroy_fb(p->fd, &p->dev->bufs[1]);
        modeset_destroy_fb(p->fd, &p->dev->bufs[0]);
        drmModeFreeEncoder(p->dev->enc);
    }

    vt_switcher_destroy(&p->vt_switcher);
    talloc_free(p->last_input);
    talloc_free(p->cur_frame);
    talloc_free(p->dev);
    close(p->fd);
}

static int preinit(struct vo *vo)
{
    struct priv *p = vo->priv;
    p->sws = mp_sws_alloc(vo);
    p->fd = -1;
    p->ev.version = DRM_EVENT_CONTEXT_VERSION;
    p->ev.page_flip_handler = modeset_page_flipped;

    if (vt_switcher_init(&p->vt_switcher, vo->log))
        goto err;

    vt_switcher_acquire(&p->vt_switcher, acquire_vt, vo);
    vt_switcher_release(&p->vt_switcher, release_vt, vo);

    if (modeset_open(vo, &p->fd, p->device_path))
        goto err;

    if (modeset_prepare_dev(vo, p->fd, p->connector_id, &p->dev))
        goto err;

    assert(p->dev);
    p->device_w = p->dev->bufs[0].width;
    p->device_h = p->dev->bufs[0].height;

    if (setup_vo_crtc(vo)) {
        MP_ERR(vo, "Cannot set CRTC for connector %u: %s\n", p->connector_id,
               mp_strerror(errno));
        goto err;
    }

    return 0;

err:
    uninit(vo);
    return -1;
}

static int query_format(struct vo *vo, int format)
{
    return sws_isSupportedInput(imgfmt2pixfmt(format));
}

static int control(struct vo *vo, uint32_t request, void *data)
{
    struct priv *p = vo->priv;
    switch (request) {
    case VOCTRL_SCREENSHOT_WIN:
        *(struct mp_image**)data = mp_image_new_copy(p->cur_frame);
        return VO_TRUE;
    case VOCTRL_REDRAW_FRAME:
        draw_image(vo, p->last_input);
        return VO_TRUE;
    case VOCTRL_GET_PANSCAN:
        return VO_TRUE;
    case VOCTRL_SET_PANSCAN:
        if (vo->config_ok)
            reconfig(vo, vo->params, 0);
        return VO_TRUE;
    }
    return VO_NOTIMPL;
}

#define OPT_BASE_STRUCT struct priv

const struct vo_driver video_out_drm = {
    .name = "drm",
    .description = "Direct Rendering Manager",
    .preinit = preinit,
    .query_format = query_format,
    .reconfig = reconfig,
    .control = control,
    .draw_image = draw_image,
    .flip_page = flip_page,
    .uninit = uninit,
    .wait_events = wait_events,
    .wakeup = wakeup,
    .priv_size = sizeof(struct priv),
    .options = (const struct m_option[]) {
        OPT_STRING("devpath", device_path, 0),
        OPT_INT("connector", connector_id, 0),
        {0},
    },
    .priv_defaults = &(const struct priv) {
Example No. 14
static int config(struct vf_instance *vf,
        int width, int height, int d_width, int d_height,
	unsigned int flags, unsigned int outfmt){
    unsigned int best=find_best_out(vf, outfmt);
    int vo_flags;
    int int_sws_flags=0;
    int round_w=0, round_h=0;
    int i;
    SwsFilter *srcFilter, *dstFilter;
    enum PixelFormat dfmt, sfmt;

    if(!best){
	mp_msg(MSGT_VFILTER,MSGL_WARN,"SwScale: no supported outfmt found :(\n");
	return 0;
    }
    sfmt = imgfmt2pixfmt(outfmt);
    if (outfmt == IMGFMT_RGB8 || outfmt == IMGFMT_BGR8) sfmt = PIX_FMT_PAL8;
    dfmt = imgfmt2pixfmt(best);

    vo_flags=vf->next->query_format(vf->next,best);

    // scaling to dwidth*d_height, if all these TRUE:
    // - option -zoom
    // - no other sw/hw up/down scaling avail.
    // - we're after postproc
    // - user didn't set w:h
    if(!(vo_flags&VFCAP_POSTPROC) && (flags&4) &&
	    vf->priv->w<0 && vf->priv->h<0){	// -zoom
	int x=(vo_flags&VFCAP_SWSCALE) ? 0 : 1;
	if(d_width<width || d_height<height){
	    // downscale!
	    if(vo_flags&VFCAP_HWSCALE_DOWN) x=0;
	} else {
	    // upscale:
	    if(vo_flags&VFCAP_HWSCALE_UP) x=0;
	}
	if(x){
	    // user wants sw scaling! (-zoom)
	    vf->priv->w=d_width;
	    vf->priv->h=d_height;
	}
    }

    if(vf->priv->noup){
        if((vf->priv->w > width) + (vf->priv->h > height) >= vf->priv->noup){
            vf->priv->w= width;
            vf->priv->h= height;
        }
    }

    if (vf->priv->w <= -8) {
      vf->priv->w += 8;
      round_w = 1;
    }
    if (vf->priv->h <= -8) {
      vf->priv->h += 8;
      round_h = 1;
    }

    if (vf->priv->w < -3 || vf->priv->h < -3 ||
         (vf->priv->w < -1 && vf->priv->h < -1)) {
      // TODO: establish a direct connection to the user's brain
      // and find out what the heck he thinks MPlayer should do
      // with this nonsense.
      mp_msg(MSGT_VFILTER, MSGL_ERR, "SwScale: EUSERBROKEN Check your parameters, they make no sense!\n");
      return 0;
    }

    if (vf->priv->w == -1)
      vf->priv->w = width;
    if (vf->priv->w == 0)
      vf->priv->w = d_width;

    if (vf->priv->h == -1)
      vf->priv->h = height;
    if (vf->priv->h == 0)
      vf->priv->h = d_height;

    if (vf->priv->w == -3)
      vf->priv->w = vf->priv->h * width / height;
    if (vf->priv->w == -2)
      vf->priv->w = vf->priv->h * d_width / d_height;

    if (vf->priv->h == -3)
      vf->priv->h = vf->priv->w * height / width;
    if (vf->priv->h == -2)
      vf->priv->h = vf->priv->w * d_height / d_width;

    if (round_w)
      vf->priv->w = ((vf->priv->w + 8) / 16) * 16;
    if (round_h)
      vf->priv->h = ((vf->priv->h + 8) / 16) * 16;

    // calculate the missing parameters:
    switch(best) {
    case IMGFMT_YV12:		/* YV12 needs w & h rounded to 2 */
    case IMGFMT_I420:
    case IMGFMT_IYUV:
    case IMGFMT_NV12:
    case IMGFMT_NV21:
      vf->priv->h = (vf->priv->h + 1) & ~1;
    case IMGFMT_YUY2:		/* YUY2 needs w rounded to 2 */
    case IMGFMT_UYVY:
      vf->priv->w = (vf->priv->w + 1) & ~1;
    }

    mp_msg(MSGT_VFILTER,MSGL_DBG2,"SwScale: scaling %dx%d %s to %dx%d %s  \n",
	width,height,vo_format_name(outfmt),
	vf->priv->w,vf->priv->h,vo_format_name(best));

    // free old ctx:
    if(vf->priv->ctx) sws_freeContext(vf->priv->ctx);
    if(vf->priv->ctx2)sws_freeContext(vf->priv->ctx2);

    // new swscaler:
    sws_getFlagsAndFilterFromCmdLine(&int_sws_flags, &srcFilter, &dstFilter);
    int_sws_flags|= vf->priv->v_chr_drop << SWS_SRC_V_CHR_DROP_SHIFT;
    int_sws_flags|= vf->priv->accurate_rnd * SWS_ACCURATE_RND;
    vf->priv->ctx=sws_getContext(width, height >> vf->priv->interlaced,
	    sfmt,
		  vf->priv->w, vf->priv->h >> vf->priv->interlaced,
	    dfmt,
	    int_sws_flags | get_sws_cpuflags(), srcFilter, dstFilter, vf->priv->param);
    if(vf->priv->interlaced){
        vf->priv->ctx2=sws_getContext(width, height >> 1,
	    sfmt,
		  vf->priv->w, vf->priv->h >> 1,
	    dfmt,
	    int_sws_flags | get_sws_cpuflags(), srcFilter, dstFilter, vf->priv->param);
    }
Example No. 15
static int query_format(struct vf_instance *vf, unsigned int fmt)
{
    if (IMGFMT_IS_HWACCEL(fmt) || sws_isSupportedInput(imgfmt2pixfmt(fmt)) < 1)
        return 0;
    return !!find_best_out(vf, fmt);
}
Example No. 16
static void fix_parameters(muxer_stream_t *stream)
{
	muxer_stream_priv_t *spriv = stream->priv;
	AVCodecContext *ctx = spriv->avstream->codec;

        ctx->bit_rate= stream->avg_rate;
        ctx->rc_buffer_size= stream->vbv_size;
        ctx->rc_max_rate= stream->max_rate;

	if(stream->type == MUXER_TYPE_AUDIO)
	{
		if (!ctx->bit_rate)
		    ctx->bit_rate = stream->wf->nAvgBytesPerSec * 8;
		ctx->codec_id = mp_tag2codec_id(stream->wf->wFormatTag, 1);
#if 0 //breaks aac in mov at least
		ctx->codec_tag = codec_get_wav_tag(ctx->codec_id);
#endif
		mp_msg(MSGT_MUXER, MSGL_INFO, "AUDIO CODEC ID: %x, TAG: %x\n", ctx->codec_id, (uint32_t) ctx->codec_tag);
		ctx->sample_rate = stream->wf->nSamplesPerSec;
//                mp_msg(MSGT_MUXER, MSGL_INFO, "stream->h.dwSampleSize: %d\n", stream->h.dwSampleSize);
		ctx->channels = stream->wf->nChannels;
                if(stream->h.dwRate && (stream->h.dwScale * (int64_t)ctx->sample_rate) % stream->h.dwRate == 0)
                    ctx->frame_size= (stream->h.dwScale * (int64_t)ctx->sample_rate) / stream->h.dwRate;
                mp_msg(MSGT_MUXER, MSGL_V, "MUXER_LAVF(audio stream) frame_size: %d, scale: %u, sps: %u, rate: %u, ctx->block_align = stream->wf->nBlockAlign; %d=%d stream->wf->nAvgBytesPerSec:%d\n",
			ctx->frame_size, stream->h.dwScale, ctx->sample_rate, stream->h.dwRate,
			ctx->block_align, stream->wf->nBlockAlign, stream->wf->nAvgBytesPerSec);
		ctx->block_align = stream->h.dwSampleSize;
		if(stream->wf+1 && stream->wf->cbSize)
		{
			ctx->extradata = av_malloc(stream->wf->cbSize);
			if(ctx->extradata != NULL)
			{
				ctx->extradata_size = stream->wf->cbSize;
				memcpy(ctx->extradata, stream->wf+1, ctx->extradata_size);
			}
			else
				mp_msg(MSGT_MUXER, MSGL_ERR, "MUXER_LAVF(audio stream) error! Could not allocate %d bytes for extradata.\n",
					stream->wf->cbSize);
		}
	}
	else if(stream->type == MUXER_TYPE_VIDEO)
	{
		ctx->codec_id = mp_tag2codec_id(stream->bih->biCompression, 0);
                if(ctx->codec_id <= 0 || force_fourcc)
                    ctx->codec_tag= stream->bih->biCompression;
		mp_msg(MSGT_MUXER, MSGL_INFO, "VIDEO CODEC ID: %d\n", ctx->codec_id);
		if (stream->imgfmt)
		    ctx->pix_fmt = imgfmt2pixfmt(stream->imgfmt);
		ctx->width = stream->bih->biWidth;
		ctx->height = stream->bih->biHeight;
		ctx->bit_rate = 800000;
		ctx->time_base.den = stream->h.dwRate;
		ctx->time_base.num = stream->h.dwScale;
		if(stream->bih->biSize > sizeof(*stream->bih))
		{
			ctx->extradata_size = stream->bih->biSize - sizeof(*stream->bih);
			ctx->extradata = av_malloc(ctx->extradata_size);
			if(ctx->extradata != NULL)
				memcpy(ctx->extradata, stream->bih+1, ctx->extradata_size);
			else
			{
				mp_msg(MSGT_MUXER, MSGL_ERR, "MUXER_LAVF(video stream) error! Could not allocate %d bytes for extradata.\n",
					ctx->extradata_size);
				ctx->extradata_size = 0;
			}
		}
	}
}
Example No. 17
File: f_lavfi.c Project: Akemi/mpv
// Attempt to initialize all pads. Return true if all are initialized, or
// false if more data is needed (or on error).
static bool init_pads(struct lavfi *c)
{
    if (!c->graph)
        goto error;

    for (int n = 0; n < c->num_out_pads; n++) {
        struct lavfi_pad *pad = c->out_pads[n];
        if (pad->buffer)
            continue;

        const AVFilter *dst_filter = NULL;
        if (pad->type == MP_FRAME_AUDIO) {
            dst_filter = avfilter_get_by_name("abuffersink");
        } else if (pad->type == MP_FRAME_VIDEO) {
            dst_filter = avfilter_get_by_name("buffersink");
        } else {
            assert(0);
        }

        if (!dst_filter)
            goto error;

        char name[256];
        snprintf(name, sizeof(name), "mpv_sink_%s", pad->name);

        if (avfilter_graph_create_filter(&pad->buffer, dst_filter,
                                         name, NULL, NULL, c->graph) < 0)
            goto error;

        if (avfilter_link(pad->filter, pad->filter_pad, pad->buffer, 0) < 0)
            goto error;
    }

    for (int n = 0; n < c->num_in_pads; n++) {
        struct lavfi_pad *pad = c->in_pads[n];
        if (pad->buffer)
            continue;

        mp_frame_unref(&pad->in_fmt);

        read_pad_input(c, pad);
        // no input data, format unknown, can't init, wait longer.
        if (!pad->pending.type)
            return false;

        if (mp_frame_is_data(pad->pending)) {
            assert(pad->pending.type == pad->type);

            pad->in_fmt = mp_frame_ref(pad->pending);
            if (!pad->in_fmt.type)
                goto error;

            if (pad->in_fmt.type == MP_FRAME_VIDEO)
                mp_image_unref_data(pad->in_fmt.data);
            if (pad->in_fmt.type == MP_FRAME_AUDIO)
                mp_aframe_unref_data(pad->in_fmt.data);
        }

        if (pad->pending.type == MP_FRAME_EOF && !pad->in_fmt.type) {
            // libavfilter makes this painful. Init it with a dummy config,
            // just so we can tell it the stream is EOF.
            if (pad->type == MP_FRAME_AUDIO) {
                struct mp_aframe *fmt = mp_aframe_create();
                mp_aframe_set_format(fmt, AF_FORMAT_FLOAT);
                mp_aframe_set_chmap(fmt, &(struct mp_chmap)MP_CHMAP_INIT_STEREO);
                mp_aframe_set_rate(fmt, 48000);
                pad->in_fmt = (struct mp_frame){MP_FRAME_AUDIO, fmt};
            }
            if (pad->type == MP_FRAME_VIDEO) {
                struct mp_image *fmt = talloc_zero(NULL, struct mp_image);
                mp_image_setfmt(fmt, IMGFMT_420P);
                mp_image_set_size(fmt, 64, 64);
                pad->in_fmt = (struct mp_frame){MP_FRAME_VIDEO, fmt};
            }
        }

        if (pad->in_fmt.type != pad->type)
            goto error;

        AVBufferSrcParameters *params = av_buffersrc_parameters_alloc();
        if (!params)
            goto error;

        pad->timebase = AV_TIME_BASE_Q;

        char *filter_name = NULL;
        if (pad->type == MP_FRAME_AUDIO) {
            struct mp_aframe *fmt = pad->in_fmt.data;
            params->format = af_to_avformat(mp_aframe_get_format(fmt));
            params->sample_rate = mp_aframe_get_rate(fmt);
            struct mp_chmap chmap = {0};
            mp_aframe_get_chmap(fmt, &chmap);
            params->channel_layout = mp_chmap_to_lavc(&chmap);
            pad->timebase = (AVRational){1, mp_aframe_get_rate(fmt)};
            filter_name = "abuffer";
        } else if (pad->type == MP_FRAME_VIDEO) {
            struct mp_image *fmt = pad->in_fmt.data;
            params->format = imgfmt2pixfmt(fmt->imgfmt);
            params->width = fmt->w;
            params->height = fmt->h;
            params->sample_aspect_ratio.num = fmt->params.p_w;
            params->sample_aspect_ratio.den = fmt->params.p_h;
            params->hw_frames_ctx = fmt->hwctx;
            params->frame_rate = av_d2q(fmt->nominal_fps, 1000000);
            filter_name = "buffer";
        } else {
            assert(0);
        }

        params->time_base = pad->timebase;

        const AVFilter *filter = avfilter_get_by_name(filter_name);
        if (filter) {
            char name[256];
            snprintf(name, sizeof(name), "mpv_src_%s", pad->name);

            pad->buffer = avfilter_graph_alloc_filter(c->graph, filter, name);
        }
        if (!pad->buffer) {
            av_free(params);
            goto error;
        }

        int ret = av_buffersrc_parameters_set(pad->buffer, params);
        av_free(params);
        if (ret < 0)
            goto error;

        if (avfilter_init_str(pad->buffer, NULL) < 0)
            goto error;

        if (avfilter_link(pad->buffer, 0, pad->filter, pad->filter_pad) < 0)
            goto error;
    }

    return true;
error:
    MP_FATAL(c, "could not initialize filter pads\n");
    c->failed = true;
    mp_filter_internal_mark_failed(c->f);
    return false;
}
Example No. 18
static int config(struct vf_instance *vf,
        int width, int height, int d_width, int d_height,
	unsigned int flags, unsigned int outfmt){
    int size, i;
    char *p;
    AVDictionary *opts = NULL;

    mux_v->bih->biWidth=width;
    mux_v->bih->biHeight=height;
    mux_v->bih->biSizeImage=mux_v->bih->biWidth*mux_v->bih->biHeight*(mux_v->bih->biBitCount/8);

    mp_msg(MSGT_MENCODER, MSGL_INFO,"videocodec: libavcodec (%dx%d fourcc=%x [%.4s])\n",
	mux_v->bih->biWidth, mux_v->bih->biHeight, mux_v->bih->biCompression,
	    (char *)&mux_v->bih->biCompression);

    lavc_venc_context->width = width;
    lavc_venc_context->height = height;
    if (lavc_param_vbitrate > 16000) /* != -1 */
	lavc_venc_context->bit_rate = lavc_param_vbitrate;
    else if (lavc_param_vbitrate >= 0) /* != -1 */
	lavc_venc_context->bit_rate = lavc_param_vbitrate*1000;
    else
	lavc_venc_context->bit_rate = 800000; /* default */

    mux_v->avg_rate= lavc_venc_context->bit_rate;

    lavc_venc_context->bit_rate_tolerance= lavc_param_vrate_tolerance*1000;
    lavc_venc_context->time_base= (AVRational){mux_v->h.dwScale, mux_v->h.dwRate};
    lavc_venc_context->qmin= lavc_param_vqmin;
    lavc_venc_context->qmax= lavc_param_vqmax;
    lavc_venc_context->lmin= (int)(FF_QP2LAMBDA * lavc_param_lmin + 0.5);
    lavc_venc_context->lmax= (int)(FF_QP2LAMBDA * lavc_param_lmax + 0.5);
    lavc_venc_context->mb_lmin= (int)(FF_QP2LAMBDA * lavc_param_mb_lmin + 0.5);
    lavc_venc_context->mb_lmax= (int)(FF_QP2LAMBDA * lavc_param_mb_lmax + 0.5);
    lavc_venc_context->max_qdiff= lavc_param_vqdiff;
    lavc_venc_context->qcompress= lavc_param_vqcompress;
    lavc_venc_context->qblur= lavc_param_vqblur;
    lavc_venc_context->max_b_frames= lavc_param_vmax_b_frames;
    lavc_venc_context->b_quant_factor= lavc_param_vb_qfactor;
    lavc_venc_context->rc_strategy= lavc_param_vrc_strategy;
    lavc_venc_context->b_frame_strategy= lavc_param_vb_strategy;
    lavc_venc_context->b_quant_offset= (int)(FF_QP2LAMBDA * lavc_param_vb_qoffset + 0.5);
    lavc_venc_context->luma_elim_threshold= lavc_param_luma_elim_threshold;
    lavc_venc_context->chroma_elim_threshold= lavc_param_chroma_elim_threshold;
    lavc_venc_context->rtp_payload_size= lavc_param_packet_size;
    lavc_venc_context->strict_std_compliance= lavc_param_strict;
    lavc_venc_context->i_quant_factor= lavc_param_vi_qfactor;
    lavc_venc_context->i_quant_offset= (int)(FF_QP2LAMBDA * lavc_param_vi_qoffset + 0.5);
    lavc_venc_context->rc_qsquish= lavc_param_rc_qsquish;
    lavc_venc_context->rc_qmod_amp= lavc_param_rc_qmod_amp;
    lavc_venc_context->rc_qmod_freq= lavc_param_rc_qmod_freq;
    lavc_venc_context->rc_eq= lavc_param_rc_eq;

    mux_v->max_rate=
    lavc_venc_context->rc_max_rate= lavc_param_rc_max_rate*1000;
    lavc_venc_context->rc_min_rate= lavc_param_rc_min_rate*1000;

    mux_v->vbv_size=
    lavc_venc_context->rc_buffer_size= lavc_param_rc_buffer_size*1000;

    lavc_venc_context->rc_initial_buffer_occupancy=
            lavc_venc_context->rc_buffer_size *
            lavc_param_rc_initial_buffer_occupancy;
    lavc_venc_context->rc_buffer_aggressivity= lavc_param_rc_buffer_aggressivity;
    lavc_venc_context->rc_initial_cplx= lavc_param_rc_initial_cplx;
    lavc_venc_context->debug= lavc_param_debug;
    lavc_venc_context->last_predictor_count= lavc_param_last_pred;
    lavc_venc_context->pre_me= lavc_param_pre_me;
    lavc_venc_context->me_pre_cmp= lavc_param_me_pre_cmp;
    lavc_venc_context->pre_dia_size= lavc_param_pre_dia_size;
    lavc_venc_context->me_subpel_quality= lavc_param_me_subpel_quality;
    lavc_venc_context->me_range= lavc_param_me_range;
    lavc_venc_context->intra_quant_bias= lavc_param_ibias;
    lavc_venc_context->inter_quant_bias= lavc_param_pbias;
    lavc_venc_context->coder_type= lavc_param_coder;
    lavc_venc_context->context_model= lavc_param_context;
    lavc_venc_context->scenechange_threshold= lavc_param_sc_threshold;
    lavc_venc_context->noise_reduction= lavc_param_noise_reduction;
    lavc_venc_context->quantizer_noise_shaping= lavc_param_qns;
    lavc_venc_context->inter_threshold= lavc_param_inter_threshold;
    lavc_venc_context->nsse_weight= lavc_param_nssew;
    lavc_venc_context->frame_skip_threshold= lavc_param_skip_threshold;
    lavc_venc_context->frame_skip_factor= lavc_param_skip_factor;
    lavc_venc_context->frame_skip_exp= lavc_param_skip_exp;
    lavc_venc_context->frame_skip_cmp= lavc_param_skip_cmp;

    if (lavc_param_intra_matrix)
    {
	char *tmp;

	lavc_venc_context->intra_matrix =
	    av_malloc(sizeof(*lavc_venc_context->intra_matrix)*64);

	i = 0;
	while ((tmp = strsep(&lavc_param_intra_matrix, ",")) && (i < 64))
	{
	    if (!tmp || (tmp && !strlen(tmp)))
		break;
	    lavc_venc_context->intra_matrix[i++] = atoi(tmp);
	}

	if (i != 64)
	    av_freep(&lavc_venc_context->intra_matrix);
	else
	    mp_msg(MSGT_MENCODER, MSGL_V, "Using user specified intra matrix\n");
    }
    if (lavc_param_inter_matrix)
    {
	char *tmp;

	lavc_venc_context->inter_matrix =
	    av_malloc(sizeof(*lavc_venc_context->inter_matrix)*64);

	i = 0;
	while ((tmp = strsep(&lavc_param_inter_matrix, ",")) && (i < 64))
	{
	    if (!tmp || (tmp && !strlen(tmp)))
		break;
	    lavc_venc_context->inter_matrix[i++] = atoi(tmp);
	}

	if (i != 64)
	    av_freep(&lavc_venc_context->inter_matrix);
	else
	    mp_msg(MSGT_MENCODER, MSGL_V, "Using user specified inter matrix\n");
    }

    p= lavc_param_rc_override_string;
    for(i=0; p; i++){
        int start, end, q;
        int e=sscanf(p, "%d,%d,%d", &start, &end, &q);
        if(e!=3){
	    mp_msg(MSGT_MENCODER,MSGL_ERR,"error parsing vrc_q\n");
            return 0;
        }
        lavc_venc_context->rc_override=
            realloc(lavc_venc_context->rc_override, sizeof(RcOverride)*(i+1));
        lavc_venc_context->rc_override[i].start_frame= start;
        lavc_venc_context->rc_override[i].end_frame  = end;
        if(q>0){
            lavc_venc_context->rc_override[i].qscale= q;
            lavc_venc_context->rc_override[i].quality_factor= 1.0;
        }
        else{
            lavc_venc_context->rc_override[i].qscale= 0;
            lavc_venc_context->rc_override[i].quality_factor= -q/100.0;
        }
        p= strchr(p, '/');
        if(p) p++;
    }
    lavc_venc_context->rc_override_count=i;

    lavc_venc_context->mpeg_quant=lavc_param_mpeg_quant;

    lavc_venc_context->dct_algo= lavc_param_fdct;
    lavc_venc_context->idct_algo= lavc_param_idct;

    lavc_venc_context->lumi_masking= lavc_param_lumi_masking;
    lavc_venc_context->temporal_cplx_masking= lavc_param_temporal_cplx_masking;
    lavc_venc_context->spatial_cplx_masking= lavc_param_spatial_cplx_masking;
    lavc_venc_context->p_masking= lavc_param_p_masking;
    lavc_venc_context->dark_masking= lavc_param_dark_masking;
        lavc_venc_context->border_masking = lavc_param_border_masking;

    if (lavc_param_aspect != NULL)
    {
	int par_width, par_height, e;
	float ratio=0;

	e= sscanf (lavc_param_aspect, "%d/%d", &par_width, &par_height);
	if(e==2){
            if(par_height)
                ratio= (float)par_width / (float)par_height;
        }else{
	    e= sscanf (lavc_param_aspect, "%f", &ratio);
	}

	if (e && ratio > 0.1 && ratio < 10.0) {
	    lavc_venc_context->sample_aspect_ratio= av_d2q(ratio * height / width, 255);
	    mp_dbg(MSGT_MENCODER, MSGL_DBG2, "sample_aspect_ratio: %d/%d\n",
                lavc_venc_context->sample_aspect_ratio.num,
                lavc_venc_context->sample_aspect_ratio.den);
	    mux_v->aspect = ratio;
	    mp_dbg(MSGT_MENCODER, MSGL_DBG2, "aspect_ratio: %f\n", ratio);
	} else {
	    mp_dbg(MSGT_MENCODER, MSGL_ERR, "aspect ratio: cannot parse \"%s\"\n", lavc_param_aspect);
	    return 0;
	}
    }
    else if (lavc_param_autoaspect) {
	lavc_venc_context->sample_aspect_ratio = av_d2q((float)d_width/d_height*height / width, 255);
	mux_v->aspect = (float)d_width/d_height;
    }

    /* keyframe interval */
    if (lavc_param_keyint >= 0) /* != -1 */
	lavc_venc_context->gop_size = lavc_param_keyint;
    else
	lavc_venc_context->gop_size = 250; /* default */

    lavc_venc_context->flags = 0;
    if (lavc_param_mb_decision)
    {
	mp_msg(MSGT_MENCODER, MSGL_INFO, MSGTR_MPCODECS_HighQualityEncodingSelected);
        lavc_venc_context->mb_decision= lavc_param_mb_decision;
    }

    lavc_venc_context->me_cmp= lavc_param_me_cmp;
    lavc_venc_context->me_sub_cmp= lavc_param_me_sub_cmp;
    lavc_venc_context->mb_cmp= lavc_param_mb_cmp;
#ifdef FF_CMP_VSAD
    lavc_venc_context->ildct_cmp= lavc_param_ildct_cmp;
#endif
    lavc_venc_context->dia_size= lavc_param_dia_size;
    lavc_venc_context->flags|= lavc_param_qpel;
    lavc_venc_context->trellis = lavc_param_trell;
    lavc_venc_context->flags|= lavc_param_lowdelay;
    lavc_venc_context->flags|= lavc_param_bit_exact;
    lavc_venc_context->flags|= lavc_param_aic;
    if (lavc_param_aiv)
        av_dict_set(&opts, "aiv", "1", 0);
    if (lavc_param_umv)
        av_dict_set(&opts, "umv", "1", 0);
    if (lavc_param_obmc)
        av_dict_set(&opts, "obmc", "1", 0);
    lavc_venc_context->flags|= lavc_param_loop;
    lavc_venc_context->flags|= lavc_param_v4mv ? CODEC_FLAG_4MV : 0;
    if (lavc_param_data_partitioning)
        av_dict_set(&opts, "data_partitioning", "1", 0);
    lavc_venc_context->flags|= lavc_param_cbp;
    lavc_venc_context->flags|= lavc_param_mv0;
    lavc_venc_context->flags|= lavc_param_qp_rd;
    if (lavc_param_ss)
        av_dict_set(&opts, "structured_slices", "1", 0);
    if (lavc_param_alt)
        av_dict_set(&opts, "alternate_scan", "1", 0);
    lavc_venc_context->flags|= lavc_param_ilme;
    lavc_venc_context->flags|= lavc_param_gmc;
#ifdef CODEC_FLAG_CLOSED_GOP
    lavc_venc_context->flags|= lavc_param_closed_gop;
#endif
    lavc_venc_context->flags|= lavc_param_gray;

    if(lavc_param_normalize_aqp) lavc_venc_context->flags|= CODEC_FLAG_NORMALIZE_AQP;
    if(lavc_param_interlaced_dct) lavc_venc_context->flags|= CODEC_FLAG_INTERLACED_DCT;
    lavc_venc_context->flags|= lavc_param_psnr;
    lavc_venc_context->intra_dc_precision = lavc_param_dc_precision - 8;
    lavc_venc_context->prediction_method= lavc_param_prediction_method;
    lavc_venc_context->brd_scale = lavc_param_brd_scale;
    lavc_venc_context->bidir_refine = lavc_param_bidir_refine;
    lavc_venc_context->scenechange_factor = lavc_param_sc_factor;
    if((lavc_param_video_global_header&1)
       /*|| (video_global_header==0 && (oc->oformat->flags & AVFMT_GLOBALHEADER))*/){
        lavc_venc_context->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }
    if(lavc_param_video_global_header&2){
        lavc_venc_context->flags2 |= CODEC_FLAG2_LOCAL_HEADER;
    }
    lavc_venc_context->mv0_threshold = lavc_param_mv0_threshold;
    lavc_venc_context->refs = lavc_param_refs;
    lavc_venc_context->b_sensitivity = lavc_param_b_sensitivity;
    lavc_venc_context->level = lavc_param_level;

    if(lavc_param_avopt){
        if(parse_avopts(lavc_venc_context, lavc_param_avopt) < 0){
            mp_msg(MSGT_MENCODER,MSGL_ERR, "Your options /%s/ look like gibberish to me pal\n", lavc_param_avopt);
            return 0;
        }
    }

    mux_v->imgfmt = lavc_param_format;
    lavc_venc_context->pix_fmt = imgfmt2pixfmt(lavc_param_format);
    if (lavc_venc_context->pix_fmt == PIX_FMT_NONE)
        return 0;

    if(!stats_file) {
    /* lavc internal 2pass bitrate control */
    switch(lavc_param_vpass){
    case 2:
    case 3:
	lavc_venc_context->flags|= CODEC_FLAG_PASS2;
	stats_file= fopen(passtmpfile, "rb");
	if(stats_file==NULL){
	    mp_msg(MSGT_MENCODER,MSGL_ERR,"2pass failed: filename=%s\n", passtmpfile);
            return 0;
	}
	fseek(stats_file, 0, SEEK_END);
	size= ftell(stats_file);
	fseek(stats_file, 0, SEEK_SET);

	lavc_venc_context->stats_in= av_malloc(size + 1);
	if(!lavc_venc_context->stats_in)
	    return 0;
	lavc_venc_context->stats_in[size]=0;

	if(fread(lavc_venc_context->stats_in, size, 1, stats_file)<1){
	    mp_msg(MSGT_MENCODER,MSGL_ERR,"2pass failed: reading from filename=%s\n", passtmpfile);
            return 0;
	}
	if(lavc_param_vpass == 2)
	    break;
	else
	    fclose(stats_file);
	    /* fall through */
    case 1:
	lavc_venc_context->flags|= CODEC_FLAG_PASS1;
	stats_file= fopen(passtmpfile, "wb");
	if(stats_file==NULL){
	    mp_msg(MSGT_MENCODER,MSGL_ERR,"2pass failed: filename=%s\n", passtmpfile);
            return 0;
	}
	if(lavc_param_turbo && (lavc_param_vpass == 1)) {
	  /* use SAD comparison functions instead of hungrier alternatives */
	  lavc_venc_context->me_pre_cmp = 0;
	  lavc_venc_context->me_cmp = 0;
	  lavc_venc_context->me_sub_cmp = 0;
	  lavc_venc_context->mb_cmp = 2;

	  /* shrink motion estimation to the smallest diamond and skip the pre-pass */
	  lavc_venc_context->pre_dia_size = 0;
	  lavc_venc_context->dia_size = 1;

	  lavc_venc_context->quantizer_noise_shaping = 0; // qns=0
	  lavc_venc_context->noise_reduction = 0; // nr=0
	  lavc_venc_context->mb_decision = 0; // mbd=0 ("realtime" encoding)

	  lavc_venc_context->flags &= ~CODEC_FLAG_QPEL;
	  lavc_venc_context->flags &= ~CODEC_FLAG_4MV;
	  lavc_venc_context->trellis = 0;
	  lavc_venc_context->flags &= ~CODEC_FLAG_CBP_RD;
	  lavc_venc_context->flags &= ~CODEC_FLAG_QP_RD;
	  lavc_venc_context->flags &= ~CODEC_FLAG_MV0;
	}
	break;
    }
    }

    lavc_venc_context->me_method = ME_ZERO+lavc_param_vme;

    /* fixed qscale :p */
    if (lavc_param_vqscale >= 0.0)
    {
	mp_msg(MSGT_MENCODER, MSGL_INFO, MSGTR_MPCODECS_UsingConstantQscale, lavc_param_vqscale);
	lavc_venc_context->flags |= CODEC_FLAG_QSCALE;
        lavc_venc_context->global_quality=
	vf->priv->pic->quality = (int)(FF_QP2LAMBDA * lavc_param_vqscale + 0.5);
    }

    lavc_venc_context->thread_count = lavc_param_threads;
    lavc_venc_context->thread_type = FF_THREAD_FRAME | FF_THREAD_SLICE;

    if (avcodec_open2(lavc_venc_context, vf->priv->codec, &opts) != 0) {
	mp_msg(MSGT_MENCODER,MSGL_ERR,MSGTR_CantOpenCodec);
	return 0;
    }
    av_dict_free(&opts);

    /* free the second-pass stats buffer; it is not needed anymore */
    av_freep(&lavc_venc_context->stats_in);
    if(lavc_venc_context->bits_per_coded_sample)
        mux_v->bih->biBitCount= lavc_venc_context->bits_per_coded_sample;
    if(lavc_venc_context->extradata_size){
        mux_v->bih= realloc(mux_v->bih, sizeof(*mux_v->bih) + lavc_venc_context->extradata_size);
        memcpy(mux_v->bih + 1, lavc_venc_context->extradata, lavc_venc_context->extradata_size);
        mux_v->bih->biSize= sizeof(*mux_v->bih) + lavc_venc_context->extradata_size;
    }

    mux_v->decoder_delay = lavc_venc_context->max_b_frames ? 1 : 0;

    return 1;
}
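
The two-pass branch above loads the whole pass-1 stats file into a NUL-terminated buffer by hand, with early returns that are easy to get subtly wrong. Below is a minimal, self-contained sketch of the same idea as a helper; the name read_pass_stats and its calling convention are illustrative, not part of MEncoder.

#include <stdio.h>
#include <stdlib.h>

/* Read an entire pass-1 stats file into a NUL-terminated buffer.
 * Returns a malloc'd string (caller frees) or NULL on any error. */
static char *read_pass_stats(const char *path)
{
    FILE *f = fopen(path, "rb");
    if (!f)
        return NULL;
    if (fseek(f, 0, SEEK_END) != 0) { fclose(f); return NULL; }
    long size = ftell(f);
    if (size < 0) { fclose(f); return NULL; }
    rewind(f);

    char *buf = malloc(size + 1);
    if (!buf) { fclose(f); return NULL; }
    if (size > 0 && fread(buf, size, 1, f) < 1) {
        free(buf); fclose(f); return NULL;
    }
    buf[size] = 0;  /* libavcodec expects stats_in to be NUL-terminated */
    fclose(f);
    return buf;
}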
Exemplo n.º 19
0
static bool write_lavc(struct image_writer_ctx *ctx, mp_image_t *image, FILE *fp)
{
    bool success = false;
    AVFrame *pic = NULL;
    AVPacket pkt = {0};
    int got_output = 0;

    av_init_packet(&pkt);

    struct AVCodec *codec = avcodec_find_encoder(ctx->opts->format);
    AVCodecContext *avctx = NULL;
    if (!codec)
        goto print_open_fail;
    avctx = avcodec_alloc_context3(codec);
    if (!avctx)
        goto print_open_fail;

    avctx->time_base = AV_TIME_BASE_Q;
    avctx->width = image->w;
    avctx->height = image->h;
    avctx->color_range = mp_csp_levels_to_avcol_range(image->params.color.levels);
    avctx->pix_fmt = imgfmt2pixfmt(image->imgfmt);
    // Annoying deprecated garbage for the jpg encoder.
    if (image->params.color.levels == MP_CSP_LEVELS_PC)
        avctx->pix_fmt = replace_j_format(avctx->pix_fmt);
    if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
        MP_ERR(ctx, "Image format %s not supported by lavc.\n",
               mp_imgfmt_to_name(image->imgfmt));
        goto error_exit;
    }
    if (codec->id == AV_CODEC_ID_PNG) {
        avctx->compression_level = ctx->opts->png_compression;
        av_opt_set_int(avctx, "pred", ctx->opts->png_filter,
                       AV_OPT_SEARCH_CHILDREN);
    }

    if (avcodec_open2(avctx, codec, NULL) < 0) {
     print_open_fail:
        MP_ERR(ctx, "Could not open libavcodec encoder for saving images\n");
        goto error_exit;
    }

    pic = av_frame_alloc();
    if (!pic)
        goto error_exit;
    for (int n = 0; n < 4; n++) {
        pic->data[n] = image->planes[n];
        pic->linesize[n] = image->stride[n];
    }
    pic->format = avctx->pix_fmt;
    pic->width = avctx->width;
    pic->height = avctx->height;
    pic->color_range = avctx->color_range;
    if (ctx->opts->tag_csp) {
        pic->color_primaries = mp_csp_prim_to_avcol_pri(image->params.color.primaries);
        pic->color_trc = mp_csp_trc_to_avcol_trc(image->params.color.gamma);
    }

    int ret = avcodec_send_frame(avctx, pic);
    if (ret < 0)
        goto error_exit;
    ret = avcodec_send_frame(avctx, NULL); // send EOF
    if (ret < 0)
        goto error_exit;
    ret = avcodec_receive_packet(avctx, &pkt);
    if (ret < 0)
        goto error_exit;
    got_output = 1;

    fwrite(pkt.data, pkt.size, 1, fp);

    success = !!got_output;
error_exit:
    if (avctx)
        avcodec_close(avctx);
    av_free(avctx);
    av_frame_free(&pic);
    av_packet_unref(&pkt);
    return success;
}
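
The example above assumes the image codec emits exactly one packet for the single frame plus the EOF flush. A more general send/receive loop, sketched below for any libavcodec version that provides avcodec_send_frame()/avcodec_receive_packet(), drains every pending packet; encode_and_drain and the write_packet callback are hypothetical names, not mpv API.

#include <libavcodec/avcodec.h>

/* Submit one frame (or NULL to flush) and drain all packets the encoder
 * has ready.  Returns 0 on success, a negative AVERROR on failure. */
static int encode_and_drain(AVCodecContext *avctx, const AVFrame *frame,
                            int (*write_packet)(const AVPacket *pkt, void *opaque),
                            void *opaque)
{
    int ret = avcodec_send_frame(avctx, frame);   /* frame == NULL flushes */
    if (ret < 0)
        return ret;

    AVPacket *pkt = av_packet_alloc();
    if (!pkt)
        return AVERROR(ENOMEM);

    for (;;) {
        ret = avcodec_receive_packet(avctx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            ret = 0;        /* no more output for now */
            break;
        }
        if (ret < 0)
            break;
        ret = write_packet(pkt, opaque);
        av_packet_unref(pkt);
        if (ret < 0)
            break;
    }
    av_packet_free(&pkt);
    return ret;
}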
Exemplo n.º 20
0
static void png_mkdir(char *buf, int verbose) {
    struct stat stat_p;

#ifndef __MINGW32__
    if ( mkdir(buf, 0755) < 0 ) {
#else
    if ( mkdir(buf) < 0 ) {
#endif
        switch (errno) { /* use switch in case other errors need to be caught
                            and handled in the future */
            case EEXIST:
                if ( stat(buf, &stat_p ) < 0 ) {
                    mp_msg(MSGT_VO, MSGL_ERR, "%s: %s: %s\n", info.short_name,
                            MSGTR_VO_GenericError, strerror(errno) );
                    mp_msg(MSGT_VO, MSGL_ERR, "%s: %s %s\n", info.short_name,
                            MSGTR_VO_UnableToAccess,buf);
                    exit_player(EXIT_ERROR);
                }
                if ( !S_ISDIR(stat_p.st_mode) ) {
                    mp_msg(MSGT_VO, MSGL_ERR, "%s: %s %s\n", info.short_name,
                            buf, MSGTR_VO_ExistsButNoDirectory);
                    exit_player(EXIT_ERROR);
                }
                if ( !(stat_p.st_mode & S_IWUSR) ) {
                    mp_msg(MSGT_VO, MSGL_ERR, "%s: %s - %s\n", info.short_name,
                            buf, MSGTR_VO_DirExistsButNotWritable);
                    exit_player(EXIT_ERROR);
                }

                mp_msg(MSGT_VO, MSGL_INFO, "%s: %s: %s\n", info.short_name, MSGTR_VO_OutputDirectory, buf);
                break;

            default:
                mp_msg(MSGT_VO, MSGL_ERR, "%s: %s: %s\n", info.short_name,
                        MSGTR_VO_GenericError, strerror(errno) );
                mp_msg(MSGT_VO, MSGL_ERR, "%s: %s - %s\n", info.short_name,
                        buf, MSGTR_VO_CantCreateDirectory);
                exit_player(EXIT_ERROR);
        } /* end switch */
    } else if ( verbose ) {
        mp_msg(MSGT_VO, MSGL_INFO, "%s: %s - %s\n", info.short_name,
                buf, MSGTR_VO_DirectoryCreateSuccess);
    } /* end if */
}
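
png_mkdir() handles the one-argument mkdir() on MinGW with an #ifdef at the call site. A small alternative, sketched under the assumption that the driver already pulls in the right mkdir() declaration for each platform, hides that difference behind a wrapper; the name portable_mkdir is illustrative.

#include <sys/stat.h>
#include <sys/types.h>

/* Wrap the platform mkdir() so callers never see the MinGW difference. */
static int portable_mkdir(const char *path, int mode)
{
#ifdef __MINGW32__
    (void)mode;               /* MinGW's mkdir() takes no permission bits */
    return mkdir(path);
#else
    return mkdir(path, mode);
#endif
}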

static int
config(uint32_t width, uint32_t height, uint32_t d_width, uint32_t d_height, uint32_t flags, char *title, uint32_t format)
{
    char buf[BUFLENGTH];

    if (z_compression == 0) {
        mp_msg(MSGT_VO, MSGL_INFO, MSGTR_LIBVO_PNG_Warning1);
        mp_msg(MSGT_VO, MSGL_INFO, MSGTR_LIBVO_PNG_Warning2);
        mp_msg(MSGT_VO, MSGL_INFO, MSGTR_LIBVO_PNG_Warning3);
    }

    snprintf(buf, BUFLENGTH, "%s", png_outdir);
    png_mkdir(buf, 1);
    mp_msg(MSGT_VO,MSGL_DBG2, "PNG Compression level %i\n", z_compression);


    if (avctx && png_format != format) {
        avcodec_close(avctx);
        av_freep(&avctx);
    }

    if (!avctx) {
        struct AVCodec *png_codec = avcodec_find_encoder(AV_CODEC_ID_PNG);
        if (!png_codec) {
            uninit();
            return -1;
        }
        avctx = avcodec_alloc_context3(png_codec);
        if (!avctx) {
            uninit();
            return -1;
        }
        avctx->compression_level = z_compression;
        avctx->pix_fmt = imgfmt2pixfmt(format);
        avctx->width = width;
        avctx->height = height;
        if (avcodec_open2(avctx, png_codec, NULL) < 0) {
            uninit();
            return -1;
        }
        png_format = format;
    }
    return 0;
}
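
config() above only prepares the cached PNG encoder; the actual per-frame dump happens elsewhere in the driver and is not part of this example. A rough sketch of what that step could look like with the old avcodec_encode_video2() API is below; png_write_frame, the output naming, and the reuse of the avctx/png_outdir/BUFLENGTH globals set up above are assumptions for illustration.

/* Encode one already-filled AVFrame with the cached context and write the
 * packet to "<png_outdir>/NNNNNNNN.png".  Returns 0 on success, -1 on error. */
static int png_write_frame(const AVFrame *frame, int frame_no)
{
    char fname[BUFLENGTH];
    AVPacket pkt = {0};
    int got_packet = 0;

    av_init_packet(&pkt);
    if (avcodec_encode_video2(avctx, &pkt, frame, &got_packet) < 0 || !got_packet)
        return -1;

    snprintf(fname, sizeof(fname), "%s/%08d.png", png_outdir, frame_no);
    FILE *fp = fopen(fname, "wb");
    if (!fp) {
        av_packet_unref(&pkt);
        return -1;
    }
    fwrite(pkt.data, pkt.size, 1, fp);
    fclose(fp);
    av_packet_unref(&pkt);
    return 0;
}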