/**
 * Push the buffered source frame downstream.
 *
 * Copies the stored frame into a freshly allocated picref — the stored
 * frame must stay unmodified because it is needed for decoding the next
 * frame — then emits start_frame/draw_slice/end_frame on the link.
 *
 * @param link output link of the buffer source
 * @return 0 on success, -1 if no frame has been buffered
 */
static int request_frame(AVFilterLink *link)
{
    BufferSourceContext *c = link->src->priv;
    AVFilterPicRef *picref;

    if (!c->has_frame) {
        av_log(link->src, AV_LOG_ERROR,
               "request_frame() called with no available frame!\n");
        /* Fail here instead of pushing a stale/uninitialized frame
         * downstream (the error was previously logged but ignored). */
        return -1;
    }

    /* This picture will be needed unmodified later for decoding the next
     * frame */
    picref = avfilter_get_video_buffer(link,
                                       AV_PERM_WRITE | AV_PERM_PRESERVE |
                                       AV_PERM_REUSE2,
                                       link->w, link->h);

    av_picture_copy((AVPicture *)&picref->data, (AVPicture *)&c->frame,
                    picref->pic->format, link->w, link->h);

    picref->pts             = c->pts;
    picref->pixel_aspect    = c->pixel_aspect;
    picref->interlaced      = c->frame.interlaced_frame;
    picref->top_field_first = c->frame.top_field_first;

    avfilter_start_frame(link, avfilter_ref_pic(picref, ~0));
    avfilter_draw_slice(link, 0, link->h, 1);
    avfilter_end_frame(link);
    avfilter_unref_pic(picref);

    c->has_frame = 0;

    return 0;
}
/* Crop filter input handler: clip the incoming picture reference to the
 * configured crop rectangle by offsetting the plane data pointers.
 * NOTE(review): the function body appears truncated in this view — the
 * closing brace and the downstream start_frame call are not visible. */
static void start_frame(AVFilterLink *link, AVFilterPicRef *picref)
{
    CropContext *crop = link->dst->priv;
    AVFilterPicRef *ref2 = avfilter_ref_pic(picref, ~0);
#ifdef _MSC_VER
    /* MSVC build: obtain the descriptor table through an accessor instead
     * of referencing the exported array directly. */
    AVPixFmtDescriptor *av_pix_fmt_descriptors = get_av_pix_fmt_descriptors();
#endif
    int i;

    ref2->w = crop->w;
    ref2->h = crop->h;

    /* Advance plane 0 to the top-left corner of the crop area;
     * bpp is bits per pixel, hence the >> 3 to convert to bytes. */
    ref2->data[0] += crop->y * ref2->linesize[0];
    ref2->data[0] += (crop->x * crop->bpp) >> 3;

    /* Paletted/pseudo-paletted formats keep the palette in data[1];
     * that pointer must not be offset. */
    if (!(av_pix_fmt_descriptors[link->format].flags & PIX_FMT_PAL)) {
        for (i = 1; i < 3; i ++) {
            if (ref2->data[i]) {
                /* Chroma planes are subsampled by hsub/vsub. */
                ref2->data[i] += (crop->y >> crop->vsub) * ref2->linesize[i];
                ref2->data[i] += ((crop->x * crop->bpp) >> 3) >> crop->hsub;
            }
        }
    }
/* Default start_frame handler: when the destination filter has an output
 * link, allocate a writable output picture, carry over the pts and forward
 * the frame on that link. Filters without outputs are a no-op. */
void avfilter_default_start_frame(AVFilterLink *link, AVFilterPicRef *picref)
{
    AVFilterLink *outlink =
        link->dst->output_count ? link->dst->outputs[0] : NULL;

    if (!outlink)
        return;

    outlink->outpic      = avfilter_get_video_buffer(outlink, AV_PERM_WRITE);
    outlink->outpic->pts = picref->pts;
    avfilter_start_frame(outlink, avfilter_ref_pic(outlink->outpic, ~0));
}
/* Transposing start_frame handler: allocate the output picture, carry over
 * the pts and invert the pixel aspect ratio (rows and columns are swapped),
 * leaving an unset aspect (num == 0) untouched. */
static void start_frame(AVFilterLink *link, AVFilterPicRef *picref)
{
    AVFilterLink *outlink = link->dst->outputs[0];
    AVFilterPicRef *outpic;

    outpic = avfilter_get_video_buffer(outlink, AV_PERM_WRITE,
                                       outlink->w, outlink->h);
    outlink->outpic = outpic;
    outpic->pts     = picref->pts;

    if (picref->pixel_aspect.num) {
        /* Transposition swaps width and height, so invert the ratio. */
        outpic->pixel_aspect.num = picref->pixel_aspect.den;
        outpic->pixel_aspect.den = picref->pixel_aspect.num;
    } else {
        /* Unknown aspect ratio: pass it through unchanged. */
        outpic->pixel_aspect = picref->pixel_aspect;
    }

    avfilter_start_frame(outlink, avfilter_ref_pic(outpic, ~0));
}
/* Default start_frame handler: when the destination filter has an output
 * link, allocate a writable output picture, copy all frame properties
 * (pts, byte position, aspect ratio, interlacing flags) from the input
 * picref, and forward the frame. Filters without outputs are a no-op. */
void avfilter_default_start_frame(AVFilterLink *link, AVFilterPicRef *picref)
{
    AVFilterLink *outlink =
        link->dst->output_count ? link->dst->outputs[0] : NULL;
    AVFilterPicRef *outpic;

    if (!outlink)
        return;

    outpic = avfilter_get_video_buffer(outlink, AV_PERM_WRITE,
                                       outlink->w, outlink->h);
    outlink->outpic = outpic;

    outpic->pts             = picref->pts;
    outpic->pos             = picref->pos;
    outpic->pixel_aspect    = picref->pixel_aspect;
    outpic->interlaced      = picref->interlaced;
    outpic->top_field_first = picref->top_field_first;

    avfilter_start_frame(outlink, avfilter_ref_pic(outpic, ~0));
}
/* Crop filter input handler: clip the incoming picture reference to the
 * configured crop rectangle by offsetting the plane data pointers.
 * NOTE(review): the function body appears truncated in this view — the
 * closing brace and the downstream start_frame call are not visible. */
static void start_frame(AVFilterLink *link, AVFilterPicRef *picref)
{
    CropContext *crop = link->dst->priv;
    AVFilterPicRef *ref2 = avfilter_ref_pic(picref, ~0);
    int i;

    ref2->w = crop->w;
    ref2->h = crop->h;

    /* Advance plane 0 to the top-left corner of the crop area;
     * bpp is bits per pixel, hence the >> 3 to convert to bytes. */
    ref2->data[0] += crop->y * ref2->linesize[0];
    ref2->data[0] += (crop->x * crop->bpp) >> 3;

    /* PAL8 and the pseudo-paletted RGB/BGR formats keep the palette in
     * data[1]; that pointer must not be offset. */
    if (link->format != PIX_FMT_PAL8 &&
        link->format != PIX_FMT_BGR4_BYTE &&
        link->format != PIX_FMT_RGB4_BYTE &&
        link->format != PIX_FMT_BGR8 &&
        link->format != PIX_FMT_RGB8) {
        for (i = 1; i < 3; i ++) {
            if (ref2->data[i]) {
                /* Chroma planes are subsampled by hsub/vsub. */
                ref2->data[i] += (crop->y >> crop->vsub) * ref2->linesize[i];
                ref2->data[i] += ((crop->x * crop->bpp) >> 3) >> crop->hsub;
            }
        }
    }
/* FPS filter output handler: deliver the next frame at the target rate.
 * If no frame is already buffered (i.e. poll_frame was not used), pull
 * frames from the input until one with pts at or past the current output
 * timestamp is buffered, then push it downstream and advance the output
 * timestamp by one frame period.
 *
 * Returns 0 on success, -1 at end of stream or if the input fails. */
static int request_frame(AVFilterLink *link)
{
    FPSContext *fps = link->src->priv;

    if (fps->videoend)
        return -1;

    if (!fps->has_frame) // support for filtering without poll_frame usage
        /* Keep requesting until the buffered frame reaches the target pts;
         * frames earlier than fps->pts are dropped by being overwritten. */
        while(!fps->pic || fps->pic->pts < fps->pts)
            if(avfilter_request_frame(link->src->inputs[0]))
                return -1;

    fps->has_frame=0;
    /* Strip AV_PERM_WRITE: downstream must not modify the buffered frame. */
    avfilter_start_frame(link, avfilter_ref_pic(fps->pic, ~AV_PERM_WRITE));
    avfilter_draw_slice (link, 0, fps->pic->h, 1);
    avfilter_end_frame (link);

    avfilter_unref_pic(fps->pic);
    fps->pic = NULL;

    /* Schedule the next output frame one frame period later. */
    fps->pts += fps->timebase;

    return 0;
}