/**
 * Apply the vibrance adjustment in place to one horizontal slice of a
 * 16-bit planar frame (planes stored in G/B/R order).
 *
 * Per pixel: normalize to [0,1], measure saturation (max-min component),
 * compute the plane luma, then lerp each component towards/away from the
 * luma with a weight driven by intensity, balance and saturation.
 *
 * @return 0 (slice jobs report no errors)
 */
static int vibrance_slice16(AVFilterContext *avctx, void *arg, int jobnr, int nb_jobs)
{
    VibranceContext *s = avctx->priv;
    AVFrame *frame = arg;

    /* Full-scale value and normalization factor for this bit depth. */
    const int depth = s->depth;
    const float peak = (1 << depth) - 1;
    const float norm = 1.f / peak;

    /* Luma coefficients, indexed in G/B/R plane order. */
    const float gc = s->lcoeffs[0];
    const float bc = s->lcoeffs[1];
    const float rc = s->lcoeffs[2];

    const int width  = frame->width;
    const int height = frame->height;

    const float intensity = s->intensity;
    const float alternate = s->alternate ? 1.f : -1.f;
    const float gint = intensity * s->balance[0];
    const float bint = intensity * s->balance[1];
    const float rint = intensity * s->balance[2];
    const float sg = alternate * FFSIGN(gint);
    const float sb = alternate * FFSIGN(bint);
    const float sr = alternate * FFSIGN(rint);

    /* Row range handled by this job. */
    const int y0 = (height * jobnr) / nb_jobs;
    const int y1 = (height * (jobnr + 1)) / nb_jobs;

    /* Plane strides in 16-bit samples rather than bytes. */
    const int gstride = frame->linesize[0] / 2;
    const int bstride = frame->linesize[1] / 2;
    const int rstride = frame->linesize[2] / 2;

    for (int y = y0; y < y1; y++) {
        uint16_t *grow = (uint16_t *)frame->data[0] + y * gstride;
        uint16_t *brow = (uint16_t *)frame->data[1] + y * bstride;
        uint16_t *rrow = (uint16_t *)frame->data[2] + y * rstride;

        for (int x = 0; x < width; x++) {
            float g = grow[x] * norm;
            float b = brow[x] * norm;
            float r = rrow[x] * norm;

            /* Saturation proxy: spread between the extreme components. */
            float sat  = FFMAX3(r, g, b) - FFMIN3(r, g, b);
            float luma = g * gc + r * rc + b * bc;

            /* Per-component lerp weight; already-saturated pixels are
             * pushed less (or more, when "alternate" flips the sign). */
            const float wg = 1.f + gint * (1.f - sg * sat);
            const float wb = 1.f + bint * (1.f - sb * sat);
            const float wr = 1.f + rint * (1.f - sr * sat);

            g = lerpf(luma, g, wg);
            b = lerpf(luma, b, wb);
            r = lerpf(luma, r, wr);

            grow[x] = av_clip_uintp2_c(g * peak, depth);
            brow[x] = av_clip_uintp2_c(b * peak, depth);
            rrow[x] = av_clip_uintp2_c(r * peak, depth);
        }
    }

    return 0;
}
/**
 * Return the number of bits in the video-packet start-code prefix for the
 * current picture type, or -1 for an unknown picture type.
 */
int ff_mpeg4_get_video_packet_prefix_length(MpegEncContext *s)
{
    if (s->pict_type == AV_PICTURE_TYPE_I)
        return 16;
    if (s->pict_type == AV_PICTURE_TYPE_P || s->pict_type == AV_PICTURE_TYPE_S)
        return s->f_code + 15;
    if (s->pict_type == AV_PICTURE_TYPE_B)
        return FFMAX3(s->f_code, s->b_code, 2) + 15;
    return -1;
}
/**
 * Return the number of bits in the video-packet start-code prefix for the
 * current picture type, or -1 for an unknown picture type.
 * (Variant using the legacy FF_*_TYPE picture-type constants.)
 */
int ff_mpeg4_get_video_packet_prefix_length(MpegEncContext *s)
{
    const int type = s->pict_type;

    if (type == FF_I_TYPE)
        return 16;
    if (type == FF_P_TYPE || type == FF_S_TYPE)
        return s->f_code + 15;
    if (type == FF_B_TYPE)
        return FFMAX3(s->f_code, s->b_code, 2) + 15;
    return -1;
}
/**
 * Thread-safe replacement for basename(): return a pointer to the last
 * component of the input path, without modifying it.
 *
 * @param path the path; may be NULL
 * @return pointer into @p path after the last separator; @p path itself if
 *         it contains no separator; "." for a NULL or empty path.
 *         For a path ending in a separator, the empty string is returned.
 */
const char *av_basename(const char *path)
{
    char *p;
#if HAVE_DOS_PATHS
    char *q, *d;
#endif

    /* Fix: strrchr(NULL, ...) is undefined behavior; treat NULL/empty
     * paths as degenerate input like POSIX basename() does. */
    if (!path || *path == '\0')
        return ".";

    p = strrchr(path, '/');
#if HAVE_DOS_PATHS
    /* On DOS-style systems '\\' and the drive colon also separate. */
    q = strrchr(path, '\\');
    d = strchr(path, ':');
    p = FFMAX3(p, q, d);
#endif

    if (!p)
        return path;

    return p + 1;
}
static struct fb_cmap *make_directcolor_cmap(struct fb_var_screeninfo *var) { int i, cols, rcols, gcols, bcols; uint16_t *red, *green, *blue; struct fb_cmap *cmap; rcols = 1 << var->red.length; gcols = 1 << var->green.length; bcols = 1 << var->blue.length; /* Make our palette the length of the deepest color */ cols = FFMAX3(rcols, gcols, bcols); red = malloc(3 * cols * sizeof(red[0])); if(!red) { mp_msg(MSGT_VO, MSGL_ERR, "[fbdev2] Can't allocate red palette with %d entries.\n", cols); return NULL; } green = red + cols; blue = green + cols; for (i = 0; i < cols; i++) { red[i] = (65535/(rcols-1)) * i; green[i] = (65535/(gcols-1)) * i; blue[i] = (65535/(bcols-1)) * i; } cmap = malloc(sizeof(struct fb_cmap)); if(!cmap) { mp_msg(MSGT_VO, MSGL_ERR, "[fbdev2] Can't allocate color map\n"); free(red); return NULL; } cmap->start = 0; cmap->transp = 0; cmap->len = cols; cmap->red = red; cmap->blue = blue; cmap->green = green; cmap->transp = NULL; return cmap; }
/**
 * Thread-safe replacement for dirname(): strip the last path component
 * from @p path in place and return the directory part.
 *
 * @param path the path to modify; may be NULL
 * @return @p path truncated at the last separator, or "." when the path
 *         is NULL or contains no separator. Note: @p path is modified
 *         (the last separator is overwritten with '\0').
 */
const char *av_dirname(char *path)
{
    /* Fix: strrchr(NULL, ...) is undefined behavior; guard NULL input. */
    char *p = path ? strrchr(path, '/') : NULL;

#if HAVE_DOS_PATHS
    /* On DOS-style systems '\\' and the drive colon also separate;
     * keep the character right after a drive colon. */
    char *q = path ? strrchr(path, '\\') : NULL;
    char *d = path ? strchr(path, ':')  : NULL;

    d = d ? d + 1 : d;

    p = FFMAX3(p, q, d);
#endif

    if (!p)
        return ".";

    *p = '\0';

    return path;
}
/**
 * Decode one MJPEG-B (QuickTime motion-JPEG format B) access unit.
 *
 * MJPEG-B replaces the standard JPEG markers with a fixed header of
 * 32-bit offsets to the DQT/DHT/SOF/SOS/SOD sections; this function reads
 * that header with a private GetBitContext and dispatches each present
 * section to the shared MJPEG decoder. For interlaced input it loops back
 * to read_header to decode the second field before emitting the frame.
 *
 * @param avctx     codec context (err_recognition and debug flags read)
 * @param data      output AVFrame, ref'ed from s->picture_ptr on success
 * @param got_frame set to 1 when a full picture is output
 * @param avpkt     input packet with one MJPEG-B chunk
 * @return consumed byte count (buf_size) or a negative AVERROR code
 */
static int mjpegb_decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    MJpegDecodeContext *s = avctx->priv_data;
    const uint8_t *buf_end, *buf_ptr;
    GetBitContext hgb; /* for the header */
    uint32_t dqt_offs, dht_offs, sof_offs, sos_offs, second_field_offs;
    uint32_t field_size, sod_offs;
    int ret;

    buf_ptr = buf;
    buf_end = buf + buf_size;
    s->got_picture = 0;

read_header:
    /* reset on every SOI */
    s->restart_interval = 0;
    s->restart_count = 0;
    s->mjpb_skiptosod = 0;

    /* Reject absurdly large inputs: sizes >= 2^28 would overflow the
     * bit-count passed to init_get_bits below. */
    if (buf_end - buf_ptr >= 1 << 28)
        return AVERROR_INVALIDDATA;

    init_get_bits(&hgb, buf_ptr, /*buf_size*/(buf_end - buf_ptr)*8);

    skip_bits(&hgb, 32); /* reserved zeros */

    if (get_bits_long(&hgb, 32) != MKBETAG('m','j','p','g')) {
        av_log(avctx, AV_LOG_WARNING, "not mjpeg-b (bad fourcc)\n");
        return AVERROR_INVALIDDATA;
    }

    field_size = get_bits_long(&hgb, 32); /* field size */
    av_log(avctx, AV_LOG_DEBUG, "field size: 0x%"PRIx32"\n", field_size);
    skip_bits(&hgb, 32); /* padded field size */
    second_field_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "second_field_offs is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "second field offs: 0x%"PRIx32"\n", second_field_offs);

    /* Quantization tables, if present. */
    dqt_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "dqt is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "dqt offs: 0x%"PRIx32"\n", dqt_offs);
    if (dqt_offs) {
        init_get_bits(&s->gb, buf_ptr+dqt_offs, (buf_end - (buf_ptr+dqt_offs))*8);
        s->start_code = DQT;
        ret = ff_mjpeg_decode_dqt(s);
        if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE))
            return ret;
    }

    /* Huffman tables, if present (errors here are tolerated even with
     * AV_EF_EXPLODE — the return value is intentionally ignored). */
    dht_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "dht is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "dht offs: 0x%"PRIx32"\n", dht_offs);
    if (dht_offs) {
        init_get_bits(&s->gb, buf_ptr+dht_offs, (buf_end - (buf_ptr+dht_offs))*8);
        s->start_code = DHT;
        ff_mjpeg_decode_dht(s);
    }

    /* Frame header (dimensions, components). */
    sof_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sof is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sof offs: 0x%"PRIx32"\n", sof_offs);
    if (sof_offs) {
        init_get_bits(&s->gb, buf_ptr+sof_offs, (buf_end - (buf_ptr+sof_offs))*8);
        s->start_code = SOF0;
        if ((ret = ff_mjpeg_decode_sof(s)) < 0)
            return ret;
    }

    /* Scan header and start-of-data offsets. */
    sos_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sos is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sos offs: 0x%"PRIx32"\n", sos_offs);
    /* NOTE(review): the message string says "sof" but this reads the SOD
     * offset — looks like a copy-paste in the log text only. */
    sod_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sof is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sod offs: 0x%"PRIx32"\n", sod_offs);
    if (sos_offs) {
        /* Clamp the scan reader to the smaller of the declared field size
         * and the bytes actually available after sos_offs. */
        init_get_bits(&s->gb, buf_ptr + sos_offs, 8 * FFMIN(field_size, buf_end - buf_ptr - sos_offs));
        s->mjpb_skiptosod = (sod_offs - sos_offs - show_bits(&s->gb, 16));
        s->start_code = SOS;
        ret = ff_mjpeg_decode_sos(s, NULL, 0, NULL);
        if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE))
            return ret;
    }

    if (s->interlaced) {
        s->bottom_field ^= 1;
        /* if not bottom field, do not output image yet */
        if (s->bottom_field != s->interlace_polarity && second_field_offs) {
            buf_ptr = buf + second_field_offs;
            goto read_header;
        }
    }

    //XXX FIXME factorize, this looks very similar to the EOI code

    if(!s->got_picture) {
        av_log(avctx, AV_LOG_WARNING, "no picture\n");
        return buf_size;
    }

    if ((ret = av_frame_ref(data, s->picture_ptr)) < 0)
        return ret;
    *got_frame = 1;

    if (!s->lossless && avctx->debug & FF_DEBUG_QP) {
        av_log(avctx, AV_LOG_DEBUG, "QP: %d\n", FFMAX3(s->qscale[0], s->qscale[1], s->qscale[2]));
    }

    return buf_size;
}
/**
 * Fold the per-channel running peak values of the current peak frame into
 * the in-memory peak buffer and reset them for the next frame.
 *
 * The buffer is grown in PEAK_BUFFER_SIZE steps as needed; on allocation
 * failure peak writing is disabled (wav->peak_output becomes NULL, which
 * makes subsequent calls return immediately).
 *
 * @param s muxer context; wav->peak_* state is read and updated
 */
static void peak_write_frame(AVFormatContext *s)
{
    WAVMuxContext *wav = s->priv_data;
    AVCodecContext *enc = s->streams[0]->codec;
    int peak_of_peaks;
    int c;

    if (!wav->peak_output)
        return;

    for (c = 0; c < enc->channels; c++) {
        /* peak_maxneg holds a negative sample value; flip to magnitude. */
        wav->peak_maxneg[c] = -wav->peak_maxneg[c];

        if (wav->peak_bps == 2 && wav->peak_format == PEAK_FORMAT_UINT8) {
            /* 16-bit samples stored as 8-bit peak points: scale down. */
            wav->peak_maxpos[c] = wav->peak_maxpos[c] / 256;
            wav->peak_maxneg[c] = wav->peak_maxneg[c] / 256;
        }

        /* One point per value: collapse positive/negative into one peak. */
        if (wav->peak_ppv == 1)
            wav->peak_maxpos[c] = FFMAX(wav->peak_maxpos[c], wav->peak_maxneg[c]);

        /* Track the global maximum and the frame where it occurred. */
        peak_of_peaks = FFMAX3(wav->peak_maxpos[c], wav->peak_maxneg[c], wav->peak_pop);
        if (peak_of_peaks > wav->peak_pop)
            wav->peak_pos_pop = wav->peak_num_frames;
        wav->peak_pop = peak_of_peaks;

        /* Grow the output buffer when the next record would not fit. */
        if (wav->peak_outbuf_size - wav->peak_outbuf_bytes < wav->peak_format * wav->peak_ppv) {
            uint8_t *tmp;
            wav->peak_outbuf_size += PEAK_BUFFER_SIZE;
            /* Fix: realloc through a temporary so the old buffer is not
             * leaked when the allocation fails. */
            tmp = av_realloc(wav->peak_output, wav->peak_outbuf_size);
            if (!tmp) {
                av_log(s, AV_LOG_ERROR, "No memory for peak data\n");
                av_freep(&wav->peak_output); /* disables further peak writing */
                return;
            }
            wav->peak_output = tmp;
        }

        if (wav->peak_format == PEAK_FORMAT_UINT8) {
            wav->peak_output[wav->peak_outbuf_bytes++] = wav->peak_maxpos[c];
            if (wav->peak_ppv == 2) {
                wav->peak_output[wav->peak_outbuf_bytes++] = wav->peak_maxneg[c];
            }
        } else {
            AV_WL16(wav->peak_output + wav->peak_outbuf_bytes, wav->peak_maxpos[c]);
            wav->peak_outbuf_bytes += 2;
            if (wav->peak_ppv == 2) {
                AV_WL16(wav->peak_output + wav->peak_outbuf_bytes, wav->peak_maxneg[c]);
                wav->peak_outbuf_bytes += 2;
            }
        }

        /* Reset running peaks for the next frame. */
        wav->peak_maxpos[c] = 0;
        wav->peak_maxneg[c] = 0;
    }
    wav->peak_num_frames++;
}
/**
 * Decode one MJPEG-B (QuickTime motion-JPEG format B) access unit
 * (legacy API variant: output via *picture / *data_size).
 *
 * MJPEG-B replaces the standard JPEG markers with a header of 32-bit
 * offsets to the DQT/DHT/SOF/SOS/SOD sections; each present section is
 * dispatched to the shared MJPEG decoder. For interlaced input, control
 * loops back to read_header to decode the second field before output.
 *
 * @return number of bytes consumed (buf_ptr - buf), 0 on bad fourcc,
 *         or -1 on SOF decode failure
 */
static int mjpegb_decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    MJpegDecodeContext *s = avctx->priv_data;
    const uint8_t *buf_end, *buf_ptr;
    AVFrame *picture = data;
    GetBitContext hgb; /* for the header */
    uint32_t dqt_offs, dht_offs, sof_offs, sos_offs, second_field_offs;
    uint32_t field_size, sod_offs;

    buf_ptr = buf;
    buf_end = buf + buf_size;

read_header:
    /* reset on every SOI */
    s->restart_interval = 0;
    s->restart_count = 0;
    s->mjpb_skiptosod = 0;

    init_get_bits(&hgb, buf_ptr, /*buf_size*/(buf_end - buf_ptr)*8);

    skip_bits(&hgb, 32); /* reserved zeros */

    if (get_bits_long(&hgb, 32) != MKBETAG('m','j','p','g')) {
        av_log(avctx, AV_LOG_WARNING, "not mjpeg-b (bad fourcc)\n");
        return 0;
    }

    field_size = get_bits_long(&hgb, 32); /* field size */
    av_log(avctx, AV_LOG_DEBUG, "field size: 0x%x\n", field_size);
    skip_bits(&hgb, 32); /* padded field size */
    second_field_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "second_field_offs is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "second field offs: 0x%x\n", second_field_offs);

    /* Quantization tables, if present (return value ignored here). */
    dqt_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "dqt is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "dqt offs: 0x%x\n", dqt_offs);
    if (dqt_offs) {
        init_get_bits(&s->gb, buf_ptr+dqt_offs, (buf_end - (buf_ptr+dqt_offs))*8);
        s->start_code = DQT;
        ff_mjpeg_decode_dqt(s);
    }

    /* Huffman tables, if present. */
    dht_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "dht is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "dht offs: 0x%x\n", dht_offs);
    if (dht_offs) {
        init_get_bits(&s->gb, buf_ptr+dht_offs, (buf_end - (buf_ptr+dht_offs))*8);
        s->start_code = DHT;
        ff_mjpeg_decode_dht(s);
    }

    /* Frame header (dimensions, components). */
    sof_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sof is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sof offs: 0x%x\n", sof_offs);
    if (sof_offs) {
        init_get_bits(&s->gb, buf_ptr+sof_offs, (buf_end - (buf_ptr+sof_offs))*8);
        s->start_code = SOF0;
        if (ff_mjpeg_decode_sof(s) < 0)
            return -1;
    }

    /* Scan header and start-of-data offsets.
     * NOTE(review): the message string for sod_offs says "sof" — looks
     * like a copy-paste in the log text only. */
    sos_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sos is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sos offs: 0x%x\n", sos_offs);
    sod_offs = read_offs(avctx, &hgb, buf_end - buf_ptr, "sof is %d and size is %d\n");
    av_log(avctx, AV_LOG_DEBUG, "sod offs: 0x%x\n", sod_offs);
    if (sos_offs) {
//        init_get_bits(&s->gb, buf+sos_offs, (buf_end - (buf+sos_offs))*8);
        init_get_bits(&s->gb, buf_ptr+sos_offs, field_size*8);
        s->mjpb_skiptosod = (sod_offs - sos_offs - show_bits(&s->gb, 16));
        s->start_code = SOS;
        ff_mjpeg_decode_sos(s, NULL, NULL);
    }

    if (s->interlaced) {
        s->bottom_field ^= 1;
        /* if not bottom field, do not output image yet */
        if (s->bottom_field != s->interlace_polarity && second_field_offs) {
            buf_ptr = buf + second_field_offs;
            /* clear so the second field cannot trigger a third pass */
            second_field_offs = 0;
            goto read_header;
        }
    }

    //XXX FIXME factorize, this looks very similar to the EOI code

    *picture= *s->picture_ptr;
    *data_size = sizeof(AVFrame);

    if(!s->lossless){
        /* Export a per-frame quality estimate from the scan quantizers. */
        picture->quality= FFMAX3(s->qscale[0], s->qscale[1], s->qscale[2]);
        picture->qstride= 0;
        picture->qscale_table= s->qscale_table;
        memset(picture->qscale_table, picture->quality, (s->width+15)/16);
        if(avctx->debug & FF_DEBUG_QP)
            av_log(avctx, AV_LOG_DEBUG, "QP: %d\n", picture->quality);
        picture->quality*= FF_QP2LAMBDA;
    }

    return buf_ptr - buf;
}
/* * Note: this function is completely cut'n'pasted from * Chris Lawrence's code. * (modified a bit to fit in my code...) */ static struct fb_cmap *make_directcolor_cmap(struct fb_var_screeninfo *var) { /* Hopefully any DIRECTCOLOR device will have a big enough palette * to handle mapping the full color depth. * e.g. 8 bpp -> 256 entry palette * * We could handle some sort of gamma here */ int i, cols, rcols, gcols, bcols; uint16_t *red, *green, *blue; struct fb_cmap *cmap; rcols = 1 << var->red.length; gcols = 1 << var->green.length; bcols = 1 << var->blue.length; /* Make our palette the length of the deepest color */ cols = FFMAX3(rcols, gcols, bcols); red = malloc(cols * sizeof(red[0])); if (!red) { mp_msg(MSGT_VO, MSGL_V, "Can't allocate red palette with %d entries.\n", cols); return NULL; } for (i = 0; i < rcols; i++) red[i] = (65535 / (rcols - 1)) * i; green = malloc(cols * sizeof(green[0])); if (!green) { mp_msg(MSGT_VO, MSGL_V, "Can't allocate green palette with %d entries.\n", cols); free(red); return NULL; } for (i = 0; i < gcols; i++) green[i] = (65535 / (gcols - 1)) * i; blue = malloc(cols * sizeof(blue[0])); if (!blue) { mp_msg(MSGT_VO, MSGL_V, "Can't allocate blue palette with %d entries.\n", cols); free(red); free(green); return NULL; } for (i = 0; i < bcols; i++) blue[i] = (65535 / (bcols - 1)) * i; cmap = malloc(sizeof(struct fb_cmap)); if (!cmap) { mp_msg(MSGT_VO, MSGL_V, "Can't allocate color map\n"); free(red); free(green); free(blue); return NULL; } cmap->start = 0; cmap->transp = 0; cmap->len = cols; cmap->red = red; cmap->blue = blue; cmap->green = green; cmap->transp = NULL; return cmap; }
/**
 * Render an ASCII-art diagram of every filter in the graph into @p buf.
 *
 * Each filter is drawn as a box ("+---+" frame) wide enough for its
 * instance name and its type name, tall enough for the larger of its
 * input/output counts; input links are drawn to the left of the box and
 * output links to the right, padded with '-' so the columns line up.
 *
 * @param buf   target AVBPrint buffer (appended to)
 * @param graph the filter graph to dump
 */
static void avfilter_graph_dump_to_buf(AVBPrint *buf, AVFilterGraph *graph)
{
    unsigned i, j, x, e;

    for (i = 0; i < graph->filter_count; i++) {
        AVFilterContext *filter = graph->filters[i];
        unsigned max_src_name = 0, max_dst_name = 0;
        unsigned max_in_name = 0, max_out_name = 0;
        unsigned max_in_fmt = 0, max_out_fmt = 0;
        unsigned width, height, in_indent;
        unsigned lname = strlen(filter->name);
        unsigned ltype = strlen(filter->filter->name);

        /* First pass: measure the widest source name, pad name and
         * format string on each side so the diagram columns align.
         * print_link_prop(NULL, l) returns the width it would print. */
        for (j = 0; j < filter->input_count; j++) {
            AVFilterLink *l = filter->inputs[j];
            unsigned ln = strlen(l->src->name) + 1 + strlen(l->srcpad->name);
            max_src_name = FFMAX(max_src_name, ln);
            max_in_name = FFMAX(max_in_name, strlen(l->dstpad->name));
            max_in_fmt = FFMAX(max_in_fmt, print_link_prop(NULL, l));
        }
        for (j = 0; j < filter->output_count; j++) {
            AVFilterLink *l = filter->outputs[j];
            unsigned ln = strlen(l->dst->name) + 1 + strlen(l->dstpad->name);
            max_dst_name = FFMAX(max_dst_name, ln);
            max_out_name = FFMAX(max_out_name, strlen(l->srcpad->name));
            max_out_fmt = FFMAX(max_out_fmt, print_link_prop(NULL, l));
        }
        /* Left margin occupied by the input-link column (plus "--" and
         * separators when any input exists). */
        in_indent = max_src_name + max_in_name + max_in_fmt;
        in_indent += in_indent ? 4 : 0;
        width = FFMAX(lname + 2, ltype + 4);
        height = FFMAX3(2, filter->input_count, filter->output_count);

        /* Top border of the box. */
        av_bprint_chars(buf, ' ', in_indent);
        av_bprintf(buf, "+");
        av_bprint_chars(buf, '-', width);
        av_bprintf(buf, "+\n");

        for (j = 0; j < height; j++) {
            /* Row indices of the links, vertically centered in the box;
             * relies on unsigned wraparound to skip rows with no link. */
            unsigned in_no  = j - (height - filter->input_count ) / 2;
            unsigned out_no = j - (height - filter->output_count) / 2;

            /* Input link */
            if (in_no < filter->input_count) {
                AVFilterLink *l = filter->inputs[in_no];
                e = buf->len + max_src_name + 2;
                av_bprintf(buf, "%s:%s", l->src->name, l->srcpad->name);
                av_bprint_chars(buf, '-', e - buf->len);
                e = buf->len + max_in_fmt + 2 + max_in_name - strlen(l->dstpad->name);
                print_link_prop(buf, l);
                av_bprint_chars(buf, '-', e - buf->len);
                av_bprintf(buf, "%s", l->dstpad->name);
            } else {
                av_bprint_chars(buf, ' ', in_indent);
            }

            /* Filter */
            av_bprintf(buf, "|");
            if (j == (height - 2) / 2) {
                /* Center the instance name in the box. */
                x = (width - lname) / 2;
                av_bprintf(buf, "%*s%-*s", x, "", width - x, filter->name);
            } else if (j == (height - 2) / 2 + 1) {
                /* Next row: the filter type name in parentheses. */
                x = (width - ltype - 2) / 2;
                av_bprintf(buf, "%*s(%s)%*s", x, "", filter->filter->name, width - ltype - 2 - x, "");
            } else {
                av_bprint_chars(buf, ' ', width);
            }
            av_bprintf(buf, "|");

            /* Output link */
            if (out_no < filter->output_count) {
                AVFilterLink *l = filter->outputs[out_no];
                unsigned ln = strlen(l->dst->name) + 1 + strlen(l->dstpad->name);
                e = buf->len + max_out_name + 2;
                av_bprintf(buf, "%s", l->srcpad->name);
                av_bprint_chars(buf, '-', e - buf->len);
                e = buf->len + max_out_fmt + 2 + max_dst_name - ln;
                print_link_prop(buf, l);
                av_bprint_chars(buf, '-', e - buf->len);
                av_bprintf(buf, "%s:%s", l->dst->name, l->dstpad->name);
            }
            av_bprintf(buf, "\n");
        }

        /* Bottom border of the box and a blank separator line. */
        av_bprint_chars(buf, ' ', in_indent);
        av_bprintf(buf, "+");
        av_bprint_chars(buf, '-', width);
        av_bprintf(buf, "+\n");
        av_bprintf(buf, "\n");
    }
}
/**
 * Tone-map one pixel of planar float input to the output frame.
 *
 * Optionally desaturates over-bright pixels first, then picks the
 * brightest component as the tone-mapping signal, maps it with the
 * selected curve, and rescales all three components by the same factor
 * so the hue is preserved.
 *
 * @param s    filter context (tonemap mode, param, desat, luma coeffs)
 * @param out  destination frame (written at the given coordinates)
 * @param in   source frame
 * @param desc pixel format descriptor of both frames
 * @param x    column of the pixel
 * @param y    row of the pixel
 * @param peak reference peak brightness of the input signal
 */
static void tonemap(TonemapContext *s, AVFrame *out, const AVFrame *in, const AVPixFmtDescriptor *desc, int x, int y, double peak)
{
    /* Per-plane pixel addresses; planes are laid out R, B, G here. */
    const float *r_in = (const float *)(in->data[0] + x * desc->comp[0].step + y * in->linesize[0]);
    const float *b_in = (const float *)(in->data[1] + x * desc->comp[1].step + y * in->linesize[1]);
    const float *g_in = (const float *)(in->data[2] + x * desc->comp[2].step + y * in->linesize[2]);
    float *r_out = (float *)(out->data[0] + x * desc->comp[0].step + y * out->linesize[0]);
    float *b_out = (float *)(out->data[1] + x * desc->comp[1].step + y * out->linesize[1]);
    float *g_out = (float *)(out->data[2] + x * desc->comp[2].step + y * out->linesize[2]);
    float sig, sig_orig;

    /* load values */
    *r_out = *r_in;
    *b_out = *b_in;
    *g_out = *g_in;

    /* desaturate to prevent unnatural colors */
    if (s->desat > 0) {
        float luma = s->coeffs->cr * *r_in + s->coeffs->cg * *g_in + s->coeffs->cb * *b_in;
        /* how far the luma exceeds the desat threshold, relatively */
        float overbright = FFMAX(luma - s->desat, 1e-6) / FFMAX(luma, 1e-6);
        *r_out = MIX(*r_in, luma, overbright);
        *g_out = MIX(*g_in, luma, overbright);
        *b_out = MIX(*b_in, luma, overbright);
    }

    /* pick the brightest component, reducing the value range as necessary
     * to keep the entire signal in range and preventing discoloration due to
     * out-of-bounds clipping */
    sig = FFMAX(FFMAX3(*r_out, *g_out, *b_out), 1e-6);
    sig_orig = sig;

    /* Map the signal through the selected tone-mapping curve. */
    switch(s->tonemap) {
    default:
    case TONEMAP_NONE:
        // do nothing
        break;
    case TONEMAP_LINEAR:
        sig = sig * s->param / peak;
        break;
    case TONEMAP_GAMMA:
        /* power curve, linear below the 0.05 knee */
        sig = sig > 0.05f ? pow(sig / peak, 1.0f / s->param) : sig * pow(0.05f / peak, 1.0f / s->param) / 0.05f;
        break;
    case TONEMAP_CLIP:
        sig = av_clipf(sig * s->param, 0, 1.0f);
        break;
    case TONEMAP_HABLE:
        sig = hable(sig) / hable(peak);
        break;
    case TONEMAP_REINHARD:
        sig = sig / (sig + s->param) * (peak + s->param) / peak;
        break;
    case TONEMAP_MOBIUS:
        sig = mobius(sig, s->param, peak);
        break;
    }

    /* apply the computed scale factor to the color,
     * linearly to prevent discoloration */
    *r_out *= sig / sig_orig;
    *g_out *= sig / sig_orig;
    *b_out *= sig / sig_orig;
}