/* Print the codec's last error (plus detail, when the library provides
 * one) to stdout, then terminate the process with a failure status. */
static void die_codec(vpx_codec_ctx_t *ctx, const char *s) {
  const char *extra;

  printf("%s: %s\n", s, vpx_codec_error(ctx));
  extra = vpx_codec_error_detail(ctx);
  if (extra != NULL)
    printf(" %s\n", extra);
  exit(EXIT_FAILURE);
}
/* NOTE(review): removed a fully commented-out duplicate of die_codec() above;
 * dead code belongs in version-control history, not in the source file. */
// Report the codec's most recent error on stderr, appending the detail
// string when libvpx provides one.
void WebmExporter::codec_error(const std::string& s)
{
  std::cerr << s << ": " << vpx_codec_error(&_codec);
  if (const char* extra = vpx_codec_error_detail(&_codec)) {
    std::cerr << ": " << extra;
  }
  std::cerr << std::endl;
}
/* Apply a new encoder configuration to the running encoder; on rejection,
 * log the libvpx error string and detail through printke(). */
void krad_vpx_encoder_config_set (krad_vpx_encoder_t *vpx,
                                  vpx_codec_enc_cfg_t *cfg) {
  int err;

  err = vpx_codec_enc_config_set (&vpx->encoder, cfg);
  if (err == VPX_CODEC_OK) {
    return;
  }
  printke ("VPX Config problem: %s\n%s\n",
           vpx_codec_err_to_string (err),
           vpx_codec_error_detail (&vpx->encoder));
}
/* Log the codec's last error (and optional detail) through printke(),
 * then abort via failfast(). */
static void krad_vpx_fail (vpx_codec_ctx_t *ctx, const char *s) {
  const char *extra;

  extra = vpx_codec_error_detail (ctx);
  printke ("%s: %s\n", s, vpx_codec_error (ctx));
  if (extra != NULL) {
    printke ("%s\n", extra);
  }
  failfast ("");
}
/*
 * Report a libvpx failure on stdout.
 *
 * Prints the caller's message, the codec's last error string and, when
 * available, the error detail ("None" otherwise).
 *
 * NOTE(review): despite the name, this variant does not terminate the
 * process -- callers must handle the failure themselves; confirm intended.
 */
static void krad_vpx_fail (vpx_codec_ctx_t *ctx, const char *msg) {
  const char *err_details;

  err_details = vpx_codec_error_detail (ctx);
  if (err_details == NULL) {
    err_details = "None";
  }
  /* Fix: terminate the report with a newline so the detail line is not
   * glued to (or buffered behind) whatever is printed next. */
  printf ("Krad VPX Fail: %s %s\nDetail: %s\n",
          msg, vpx_codec_error (ctx), err_details);
}
/* If the context holds a pending error, print it (with detail when
 * present) to stderr and exit the process; otherwise do nothing. */
static void ctx_exit_on_error(vpx_codec_ctx_t *ctx, const char *s) {
  const char *extra;

  if (!ctx->err)
    return;

  extra = vpx_codec_error_detail(ctx);
  fprintf(stderr, "%s: %s\n", s, vpx_codec_error(ctx));
  if (extra)
    fprintf(stderr, " %s\n", extra);
  exit(EXIT_FAILURE);
}
/*
 * QuickTime ICM "begin pass" entry point.
 *
 * Maps the ICM pass-mode flag combination onto the libvpx rate-control
 * pass (one-pass, first pass, or last pass) and primes encoder state
 * accordingly.  Returns noErr on success, nilHandleErr when a second pass
 * is requested without an initialized codec, notOpenErr when second-pass
 * encoder init fails, and paramErr for unrecognized flag combinations.
 */
pascal ComponentResult VP8_Encoder_BeginPass(VP8EncoderGlobals globals,ICMCompressionPassModeFlags passModeFlags,
                                             UInt32 flags, ICMMultiPassStorageRef multiPassStorage )
{
  ComponentResult err = noErr;
  dbg_printf("[VP8e -- %08lx] VP8_Encoder_BeginPass(%lu, %lu) \n", (UInt32) globals, passModeFlags,flags);
  /* Output frames without reading multi-pass storage -> plain 1-pass encode. */
  if ((passModeFlags &kICMCompressionPassMode_OutputEncodedFrames)
      && !(passModeFlags & kICMCompressionPassMode_ReadFromMultiPassStorage))
  {
    dbg_printf("[VP8e -- %08lx] default 1 pass \n", (UInt32) globals);
    globals->currentPass = VPX_RC_ONE_PASS;
  }
  /* Write stats, no frame output -> first pass of a two-pass encode. */
  else if ((passModeFlags & kICMCompressionPassMode_WriteToMultiPassStorage)
           && !(passModeFlags & kICMCompressionPassMode_OutputEncodedFrames))
  {
    dbg_printf("[VP8e -- %08lx] First Pass \n", (UInt32) globals);
    /* Drop stats left over from any previous first pass. */
    if (globals->stats.buf != NULL)
    {
      free(globals->stats.buf);
      globals->stats.buf =NULL;
      globals->stats.sz=0;
    }
    globals->currentPass = VPX_RC_FIRST_PASS;
  }
  /* Output frames while reading stats back -> final (second) pass. */
  else if ((passModeFlags & kICMCompressionPassMode_OutputEncodedFrames)
           && (passModeFlags & kICMCompressionPassMode_ReadFromMultiPassStorage))
  {
    dbg_printf("[VP8e -- %08lx] Second Pass \n", (UInt32) globals);
    globals->currentPass = VPX_RC_LAST_PASS;
    if (globals->codec == NULL) // this should be initialized if there was a first pass
      return nilHandleErr;
    /* Hand the first-pass stats buffer to the encoder and restart the
     * frame counter, then re-init the codec for the last pass. */
    globals->cfg.g_pass = VPX_RC_LAST_PASS;
    globals->cfg.rc_twopass_stats_in.sz = globals->stats.sz;
    globals->cfg.rc_twopass_stats_in.buf = globals->stats.buf;
    globals->frameCount = 0;
    if(vpx_codec_enc_init(globals->codec, &vpx_codec_vp8_cx_algo, &globals->cfg, 0))
    {
      const char *detail = vpx_codec_error_detail(globals->codec);
      dbg_printf("[VP8e] Failed to initialize encoder second pass %s\n", detail);
      return notOpenErr;
    }
    setCustomPostInit(globals); //not sure if I this is needed just following ivfenc example
  }
  else
  {
    return paramErr;///not sure what other type of pass there is
  }
  return err;
}
//initialize the codec if needed static void initializeCodec(VP8EncoderGlobals glob, ICMCompressorSourceFrameRef sourceFrame) { if (glob->codec != NULL) return; dbg_printf("[vp8e - %08lx] initializeCodec\n", (UInt32)glob); glob->codec = calloc(1, sizeof(vpx_codec_ctx_t)); setBitrate(glob, sourceFrame); //because we don't know framerate untile we have a source image.. this is done here setMaxKeyDist(glob); setFrameRate(glob); setCustom(glob); glob->cfg.g_pass = glob->currentPass; dbg_printEncoderSettings(&glob->cfg); if (vpx_codec_enc_init(glob->codec, &vpx_codec_vp8_cx_algo, &glob->cfg, 0)) { const char *detail = vpx_codec_error_detail(glob->codec); dbg_printf("[vp8e - %08lx] Failed to initialize encoder pass = %d %s\n", (UInt32)glob, glob->currentPass, detail); } setCustomPostInit(glob); }
/*
 * Filter preprocess hook: size/timebase the encoder config from the video
 * configuration, initialize the libvpx encoder and apply runtime controls
 * (CPU usage, static threshold, auto alt-ref, token partitions), then mark
 * the state ready.
 */
static void enc_process_unused_guard(void); /* (no-op forward decl removed below) */
static void enc_preprocess(MSFilter *f) {
	vpx_codec_err_t res;
	EncState *s=(EncState*)f->data;

	s->cfg.g_w = s->vconf.vsize.width;
	s->cfg.g_h = s->vconf.vsize.height;
	s->cfg.g_timebase.den=s->vconf.fps;
	/* Initialize codec */
#ifdef FRAGMENT_ON_PARTITIONS
	/* VPX_CODEC_USE_OUTPUT_PARTITION: output 1 frame per partition */
	res = vpx_codec_enc_init(&s->codec, interface, &s->cfg, VPX_CODEC_USE_OUTPUT_PARTITION);
#else
	res = vpx_codec_enc_init(&s->codec, interface, &s->cfg, 0);
#endif
	if (res) {
		/* Fix: format string previously ended in a literal 'n' instead of
		 * the newline escape '\n'. */
		ms_error("vpx_codec_enc_init failed: %s (%s)\n", vpx_codec_err_to_string(res), vpx_codec_error_detail(&s->codec));
	}
	/*cpu/quality tradeoff: positive values decrease CPU usage at the expense of quality*/
	/* NOTE(review): both arms of this ternary are 10, so g_threads has no
	 * effect on CPUUSED here -- looks like one arm was meant to differ;
	 * left unchanged to preserve behavior, confirm intent. */
	vpx_codec_control(&s->codec, VP8E_SET_CPUUSED, (s->cfg.g_threads > 1) ? 10 : 10);
	vpx_codec_control(&s->codec, VP8E_SET_STATIC_THRESHOLD, 0);
	vpx_codec_control(&s->codec, VP8E_SET_ENABLEAUTOALTREF, 1);
	if (s->cfg.g_threads > 1) {
		if (vpx_codec_control(&s->codec, VP8E_SET_TOKEN_PARTITIONS, 2) != VPX_CODEC_OK) {
			ms_error("VP8: failed to set multiple token partition");
		} else {
			ms_message("VP8: multiple token partitions used");
		}
	}
#ifdef FRAGMENT_ON_PARTITIONS
	vpx_codec_control(&s->codec, VP8E_SET_TOKEN_PARTITIONS, 0x3);
	s->token_partition_count = 8;
#endif
	/* vpx_codec_control(&s->codec, VP8E_SET_CPUUSED, 0);*/ /* -16 (quality) .. 16 (speed) */
	video_starter_init(&s->starter);
	s->ready=TRUE;
}
/****************************************************************************
 * Decode: the whole thing
 ****************************************************************************/
/*
 * Decode one input block and return at most one decoded picture (or NULL).
 * Ownership: the input block is always consumed (released and cleared from
 * *pp_block) before returning.  The packet PTS is carried through libvpx
 * via a heap-allocated mtime_t handed to user_priv.
 */
static picture_t *Decode(decoder_t *dec, block_t **pp_block)
{
    struct vpx_codec_ctx *ctx = &dec->p_sys->ctx;

    block_t *block = *pp_block;
    if (!block)
        return NULL;

    if (block->i_flags & (BLOCK_FLAG_DISCONTINUITY|BLOCK_FLAG_CORRUPTED)) {
        /* Fix: the block was previously leaked here (and *pp_block left
         * set, so the owner could keep resubmitting it). */
        block_Release(block);
        *pp_block = NULL;
        return NULL;
    }

    /* Associate packet PTS with decoded frame */
    mtime_t *pkt_pts = malloc(sizeof(*pkt_pts));
    if (!pkt_pts) {
        block_Release(block);
        *pp_block = NULL;
        return NULL;
    }

    *pkt_pts = block->i_pts;

    vpx_codec_err_t err;
    err = vpx_codec_decode(ctx, block->p_buffer, block->i_buffer, pkt_pts, 0);

    block_Release(block);
    *pp_block = NULL;

    if (err != VPX_CODEC_OK) {
        free(pkt_pts);
        const char *error = vpx_codec_error(ctx);
        const char *detail = vpx_codec_error_detail(ctx);
        if (!detail)
            detail = "no specific information";
        msg_Err(dec, "Failed to decode frame: %s (%s)", error, detail);
        return NULL;
    }

    const void *iter = NULL;
    struct vpx_image *img = vpx_codec_get_frame(ctx, &iter);
    if (!img) {
        free(pkt_pts);
        return NULL;
    }

    /* fetches back the PTS */
    pkt_pts = img->user_priv;
    mtime_t pts = *pkt_pts;
    free(pkt_pts);

    if (img->fmt != VPX_IMG_FMT_I420) {
        msg_Err(dec, "Unsupported output colorspace %d", img->fmt);
        return NULL;
    }

    video_format_t *v = &dec->fmt_out.video;

    /* Propagate a size change coming from the stream to the output format. */
    if (img->d_w != v->i_visible_width || img->d_h != v->i_visible_height) {
        v->i_visible_width = img->d_w;
        v->i_visible_height = img->d_h;
    }

    picture_t *pic = decoder_NewPicture(dec);
    if (!pic)
        return NULL;

    /* Copy each plane line by line, bounded by the smaller of the two
     * strides so neither buffer is overrun. */
    for (int plane = 0; plane < pic->i_planes; plane++ ) {
        uint8_t *src = img->planes[plane];
        uint8_t *dst = pic->p[plane].p_pixels;
        int src_stride = img->stride[plane];
        int dst_stride = pic->p[plane].i_pitch;

        int size = __MIN( src_stride, dst_stride );
        for( int line = 0; line < pic->p[plane].i_visible_lines; line++ ) {
            memcpy( dst, src, size );
            src += src_stride;
            dst += dst_stride;
        }
    }

    pic->b_progressive = true; /* codec does not support interlacing */
    pic->date = pts;

    return pic;
}
/*
 * vpxdec driver: parse command-line options, identify the input container
 * (IVF/WebM/raw), initialize the requested decoder, then decode every frame,
 * optionally scaling and writing output as Y4M/raw/MD5.
 * Returns EXIT_SUCCESS when no frames were corrupted, EXIT_FAILURE otherwise.
 */
int main_loop(int argc, const char **argv_) {
  vpx_codec_ctx_t decoder;
  char *fn = NULL;
  int i;
  uint8_t *buf = NULL;
  size_t bytes_in_buffer = 0, buffer_size = 0;
  FILE *infile;
  int frame_in = 0, frame_out = 0, flipuv = 0, noblit = 0;
  int do_md5 = 0, progress = 0;
  int stop_after = 0, postproc = 0, summary = 0, quiet = 1;
  int arg_skip = 0;
  int ec_enabled = 0;
  const VpxInterface *interface = NULL;
  const VpxInterface *fourcc_interface = NULL;
  uint64_t dx_time = 0;
  struct arg arg;
  char **argv, **argi, **argj;
  int single_file;
  int use_y4m = 1;
  vpx_codec_dec_cfg_t cfg = {0};
#if CONFIG_VP8_DECODER
  vp8_postproc_cfg_t vp8_pp_cfg = {0};
  int vp8_dbg_color_ref_frame = 0;
  int vp8_dbg_color_mb_modes = 0;
  int vp8_dbg_color_b_modes = 0;
  int vp8_dbg_display_mv = 0;
#endif
  int frames_corrupted = 0;
  int dec_flags = 0;
  int do_scale = 0;
  vpx_image_t *scaled_img = NULL;
  int frame_avail, got_data;
  int num_external_frame_buffers = 0;
  struct ExternalFrameBufferList ext_fb_list = {0};

  const char *outfile_pattern = NULL;
  char outfile_name[PATH_MAX] = {0};
  FILE *outfile = NULL;

  MD5Context md5_ctx;
  unsigned char md5_digest[16];

  struct VpxDecInputContext input = {0};
  struct VpxInputContext vpx_input_ctx = {0};
  struct WebmInputContext webm_ctx = {0};
  input.vpx_input_ctx = &vpx_input_ctx;
  input.webm_ctx = &webm_ctx;

  /* Parse command line */
  exec_name = argv_[0];
  argv = argv_dup(argc - 1, argv_ + 1);

  /* Recognized options are consumed; unmatched arguments are compacted to
   * the front of argv (argj) so they can be checked / used as the filename. */
  for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) {
    memset(&arg, 0, sizeof(arg));
    arg.argv_step = 1;

    if (arg_match(&arg, &codecarg, argi)) {
      interface = get_vpx_decoder_by_name(arg.val);
      if (!interface)
        die("Error: Unrecognized argument (%s) to --codec\n", arg.val);
    } else if (arg_match(&arg, &looparg, argi)) {
      // no-op
    } else if (arg_match(&arg, &outputfile, argi))
      outfile_pattern = arg.val;
    else if (arg_match(&arg, &use_yv12, argi)) {
      use_y4m = 0;
      flipuv = 1;
    } else if (arg_match(&arg, &use_i420, argi)) {
      use_y4m = 0;
      flipuv = 0;
    } else if (arg_match(&arg, &flipuvarg, argi))
      flipuv = 1;
    else if (arg_match(&arg, &noblitarg, argi))
      noblit = 1;
    else if (arg_match(&arg, &progressarg, argi))
      progress = 1;
    else if (arg_match(&arg, &limitarg, argi))
      stop_after = arg_parse_uint(&arg);
    else if (arg_match(&arg, &skiparg, argi))
      arg_skip = arg_parse_uint(&arg);
    else if (arg_match(&arg, &postprocarg, argi))
      postproc = 1;
    else if (arg_match(&arg, &md5arg, argi))
      do_md5 = 1;
    else if (arg_match(&arg, &summaryarg, argi))
      summary = 1;
    else if (arg_match(&arg, &threadsarg, argi))
      cfg.threads = arg_parse_uint(&arg);
    else if (arg_match(&arg, &verbosearg, argi))
      quiet = 0;
    else if (arg_match(&arg, &scalearg, argi))
      do_scale = 1;
    else if (arg_match(&arg, &fb_arg, argi))
      num_external_frame_buffers = arg_parse_uint(&arg);
#if CONFIG_VP8_DECODER
    else if (arg_match(&arg, &addnoise_level, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_ADDNOISE;
      vp8_pp_cfg.noise_level = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &demacroblock_level, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_DEMACROBLOCK;
      vp8_pp_cfg.deblocking_level = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &deblock, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_DEBLOCK;
    } else if (arg_match(&arg, &mfqe, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_MFQE;
    } else if (arg_match(&arg, &pp_debug_info, argi)) {
      unsigned int level = arg_parse_uint(&arg);
      postproc = 1;
      /* Clear the base postproc bits before applying the debug level. */
      vp8_pp_cfg.post_proc_flag &= ~0x7;
      if (level)
        vp8_pp_cfg.post_proc_flag |= level;
    } else if (arg_match(&arg, &pp_disp_ref_frame, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_ref_frame = flags;
      }
    } else if (arg_match(&arg, &pp_disp_mb_modes, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_mb_modes = flags;
      }
    } else if (arg_match(&arg, &pp_disp_b_modes, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_b_modes = flags;
      }
    } else if (arg_match(&arg, &pp_disp_mvs, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_display_mv = flags;
      }
    } else if (arg_match(&arg, &error_concealment, argi)) {
      ec_enabled = 1;
    }
#endif
    else
      argj++;
  }

  /* Check for unrecognized options */
  for (argi = argv; *argi; argi++)
    if (argi[0][0] == '-' && strlen(argi[0]) > 1)
      die("Error: Unrecognized option %s\n", *argi);

  /* Handle non-option arguments */
  fn = argv[0];

  if (!fn)
    usage_exit();

  /* Open file */
  infile = strcmp(fn, "-") ? fopen(fn, "rb") : set_binary_mode(stdin);

  if (!infile) {
    fprintf(stderr, "Failed to open file '%s'", strcmp(fn, "-") ? fn : "stdin");
    return EXIT_FAILURE;
  }
#if CONFIG_OS_SUPPORT
  /* Make sure we don't dump to the terminal, unless forced to with -o - */
  if (!outfile_pattern && isatty(fileno(stdout)) && !do_md5 && !noblit) {
    fprintf(stderr,
            "Not dumping raw video to your terminal. Use '-o -' to "
            "override.\n");
    return EXIT_FAILURE;
  }
#endif
  /* Probe the container type; probing order matters (IVF, then WebM when
   * compiled in, then raw as the fallback). */
  input.vpx_input_ctx->file = infile;
  if (file_is_ivf(input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_IVF;
#if CONFIG_WEBM_IO
  else if (file_is_webm(input.webm_ctx, input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_WEBM;
#endif
  else if (file_is_raw(input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_RAW;
  else {
    fprintf(stderr, "Unrecognized input file type.\n");
#if !CONFIG_WEBM_IO
    fprintf(stderr, "vpxdec was built without WebM container support.\n");
#endif
    return EXIT_FAILURE;
  }

  outfile_pattern = outfile_pattern ? outfile_pattern : "-";
  single_file = is_single_file(outfile_pattern);

  if (!noblit && single_file) {
    generate_filename(outfile_pattern, outfile_name, PATH_MAX,
                      vpx_input_ctx.width, vpx_input_ctx.height, 0);
    if (do_md5)
      MD5Init(&md5_ctx);
    else
      outfile = open_outfile(outfile_name);
  }

  if (use_y4m && !noblit) {
    if (!single_file) {
      fprintf(stderr, "YUV4MPEG2 not supported with output patterns,"
              " try --i420 or --yv12.\n");
      return EXIT_FAILURE;
    }

#if CONFIG_WEBM_IO
    if (vpx_input_ctx.file_type == FILE_TYPE_WEBM) {
      if (webm_guess_framerate(input.webm_ctx, input.vpx_input_ctx)) {
        fprintf(stderr, "Failed to guess framerate -- error parsing "
                "webm file?\n");
        return EXIT_FAILURE;
      }
    }
#endif
  }

  /* Choose the decoder: the container fourcc wins over --codec (with a
   * warning on mismatch); fall back to the first registered decoder. */
  fourcc_interface = get_vpx_decoder_by_fourcc(vpx_input_ctx.fourcc);
  if (interface && fourcc_interface && interface != fourcc_interface)
    warn("Header indicates codec: %s\n", fourcc_interface->name);
  else
    interface = fourcc_interface;

  if (!interface)
    interface = get_vpx_decoder_by_index(0);

  dec_flags = (postproc ? VPX_CODEC_USE_POSTPROC : 0) |
              (ec_enabled ? VPX_CODEC_USE_ERROR_CONCEALMENT : 0);
  if (vpx_codec_dec_init(&decoder, interface->interface(), &cfg, dec_flags)) {
    fprintf(stderr, "Failed to initialize decoder: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (!quiet)
    fprintf(stderr, "%s\n", decoder.name);

#if CONFIG_VP8_DECODER
  if (vp8_pp_cfg.post_proc_flag
      && vpx_codec_control(&decoder, VP8_SET_POSTPROC, &vp8_pp_cfg)) {
    fprintf(stderr, "Failed to configure postproc: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_ref_frame
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_REF_FRAME, vp8_dbg_color_ref_frame)) {
    fprintf(stderr, "Failed to configure reference block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_mb_modes
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_MB_MODES, vp8_dbg_color_mb_modes)) {
    fprintf(stderr, "Failed to configure macro block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_b_modes
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_B_MODES, vp8_dbg_color_b_modes)) {
    fprintf(stderr, "Failed to configure block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_display_mv
      && vpx_codec_control(&decoder, VP8_SET_DBG_DISPLAY_MV, vp8_dbg_display_mv)) {
    fprintf(stderr, "Failed to configure motion vector visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }
#endif

  if (arg_skip)
    fprintf(stderr, "Skipping first %d frames.\n", arg_skip);
  while (arg_skip) {
    if (read_frame(&input, &buf, &bytes_in_buffer, &buffer_size))
      break;
    arg_skip--;
  }

  if (num_external_frame_buffers > 0) {
    ext_fb_list.num_external_frame_buffers = num_external_frame_buffers;
    ext_fb_list.ext_fb = (struct ExternalFrameBuffer *)calloc(
        num_external_frame_buffers, sizeof(*ext_fb_list.ext_fb));
    if (vpx_codec_set_frame_buffer_functions(
            &decoder, get_vp9_frame_buffer, release_vp9_frame_buffer,
            &ext_fb_list)) {
      fprintf(stderr, "Failed to configure external frame buffers: %s\n",
              vpx_codec_error(&decoder));
      return EXIT_FAILURE;
    }
  }

  frame_avail = 1;
  got_data = 0;

  /* Decode file */
  while (frame_avail || got_data) {
    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img;
    struct vpx_usec_timer timer;
    int corrupted;

    frame_avail = 0;
    if (!stop_after || frame_in < stop_after) {
      if (!read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) {
        frame_avail = 1;
        frame_in++;

        vpx_usec_timer_start(&timer);

        if (vpx_codec_decode(&decoder, buf, (unsigned int)bytes_in_buffer, NULL, 0)) {
          const char *detail = vpx_codec_error_detail(&decoder);
          warn("Failed to decode frame %d: %s", frame_in, vpx_codec_error(&decoder));

          if (detail)
            warn("Additional information: %s", detail);
          goto fail;
        }

        vpx_usec_timer_mark(&timer);
        dx_time += vpx_usec_timer_elapsed(&timer);
      }
    }

    vpx_usec_timer_start(&timer);

    got_data = 0;
    if ((img = vpx_codec_get_frame(&decoder, &iter))) {
      ++frame_out;
      got_data = 1;
    }

    vpx_usec_timer_mark(&timer);
    dx_time += (unsigned int)vpx_usec_timer_elapsed(&timer);

    /* NOTE(review): the message names VP8_GET_FRAME_CORRUPTED but the
     * control actually queried is VP8D_GET_FRAME_CORRUPTED. */
    if (vpx_codec_control(&decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
      warn("Failed VP8_GET_FRAME_CORRUPTED: %s", vpx_codec_error(&decoder));
      goto fail;
    }
    frames_corrupted += corrupted;

    if (progress)
      show_progress(frame_in, frame_out, dx_time);

    if (!noblit && img) {
      const int PLANES_YUV[] = {VPX_PLANE_Y, VPX_PLANE_U, VPX_PLANE_V};
      const int PLANES_YVU[] = {VPX_PLANE_Y, VPX_PLANE_V, VPX_PLANE_U};
      const int *planes = flipuv ? PLANES_YVU : PLANES_YUV;

      if (do_scale) {
        if (frame_out == 1) {
          // If the output frames are to be scaled to a fixed display size then
          // use the width and height specified in the container. If either of
          // these is set to 0, use the display size set in the first frame
          // header. If that is unavailable, use the raw decoded size of the
          // first decoded frame.
          int display_width = vpx_input_ctx.width;
          int display_height = vpx_input_ctx.height;
          if (!display_width || !display_height) {
            int display_size[2];
            if (vpx_codec_control(&decoder, VP9D_GET_DISPLAY_SIZE, display_size)) {
              // As last resort use size of first frame as display size.
              display_width = img->d_w;
              display_height = img->d_h;
            } else {
              display_width = display_size[0];
              display_height = display_size[1];
            }
          }
          /* NOTE(review): vpx_img_alloc() may return NULL; the dereference
           * below would then crash -- TODO confirm / add a check upstream. */
          scaled_img = vpx_img_alloc(NULL, VPX_IMG_FMT_I420, display_width,
                                     display_height, 16);
        }

        if (img->d_w != scaled_img->d_w || img->d_h != scaled_img->d_h) {
          vpx_image_scale(img, scaled_img, kFilterBox);
          img = scaled_img;
        }
      }

      if (single_file) {
        if (use_y4m) {
          char buf[Y4M_BUFFER_SIZE] = {0};
          size_t len = 0;
          if (frame_out == 1) {
            // Y4M file header
            len = y4m_write_file_header(buf, sizeof(buf),
                                        vpx_input_ctx.width,
                                        vpx_input_ctx.height,
                                        &vpx_input_ctx.framerate, img->fmt);
            if (do_md5) {
              MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
            } else {
              fputs(buf, outfile);
            }
          }

          // Y4M frame header
          len = y4m_write_frame_header(buf, sizeof(buf));
          if (do_md5) {
            MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
          } else {
            fputs(buf, outfile);
          }
        }

        if (do_md5) {
          update_image_md5(img, planes, &md5_ctx);
        } else {
          write_image_file(img, planes, outfile);
        }
      } else {
        /* One output file per frame when a pattern is used. */
        generate_filename(outfile_pattern, outfile_name, PATH_MAX,
                          img->d_w, img->d_h, frame_in);
        if (do_md5) {
          MD5Init(&md5_ctx);
          update_image_md5(img, planes, &md5_ctx);
          MD5Final(md5_digest, &md5_ctx);
          print_md5(md5_digest, outfile_name);
        } else {
          outfile = open_outfile(outfile_name);
          write_image_file(img, planes, outfile);
          fclose(outfile);
        }
      }
    }

    if (stop_after && frame_in >= stop_after)
      break;
  }

  if (summary || progress) {
    show_progress(frame_in, frame_out, dx_time);
    fprintf(stderr, "\n");
  }

  if (frames_corrupted)
    fprintf(stderr, "WARNING: %d frames corrupted.\n", frames_corrupted);

fail:

  if (vpx_codec_destroy(&decoder)) {
    fprintf(stderr, "Failed to destroy decoder: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (!noblit && single_file) {
    if (do_md5) {
      MD5Final(md5_digest, &md5_ctx);
      print_md5(md5_digest, outfile_name);
    } else {
      fclose(outfile);
    }
  }

#if CONFIG_WEBM_IO
  if (input.vpx_input_ctx->file_type == FILE_TYPE_WEBM)
    webm_free(input.webm_ctx);
#endif
  /* For WebM input the read buffer is owned by the webm reader. */
  if (input.vpx_input_ctx->file_type != FILE_TYPE_WEBM)
    free(buf);

  if (scaled_img) vpx_img_free(scaled_img);

  for (i = 0; i < ext_fb_list.num_external_frame_buffers; ++i) {
    free(ext_fb_list.ext_fb[i].data);
  }
  free(ext_fb_list.ext_fb);
  fclose(infile);
  free(argv);

  return frames_corrupted ? EXIT_FAILURE : EXIT_SUCCESS;
}
static void dec_process(MSFilter *f) { DecState *s = (DecState *)f->data; mblk_t *im; vpx_codec_err_t err; vpx_image_t *img; vpx_codec_iter_t iter = NULL; MSQueue frame; MSQueue mtofree_queue; Vp8RtpFmtFrameInfo frame_info; if (!s->ready){ ms_queue_flush(f->inputs[0]); return; } ms_filter_lock(f); ms_queue_init(&frame); ms_queue_init(&mtofree_queue); /* Unpack RTP payload format for VP8. */ vp8rtpfmt_unpacker_feed(&s->unpacker, f->inputs[0]); /* Decode unpacked VP8 frames. */ while (vp8rtpfmt_unpacker_get_frame(&s->unpacker, &frame, &frame_info) == 0) { while ((im = ms_queue_get(&frame)) != NULL) { err = vpx_codec_decode(&s->codec, im->b_rptr, (unsigned int)(im->b_wptr - im->b_rptr), NULL, 0); if ((s->flags & VPX_CODEC_USE_INPUT_FRAGMENTS) && mblk_get_marker_info(im)) { err = vpx_codec_decode(&s->codec, NULL, 0, NULL, 0); } if (err) { ms_warning("vp8 decode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec)?vpx_codec_error_detail(&s->codec):"no details"); } ms_queue_put(&mtofree_queue, im); } /* Get decoded frame */ if ((img = vpx_codec_get_frame(&s->codec, &iter))) { int i, j; int reference_updates = 0; if (vpx_codec_control(&s->codec, VP8D_GET_LAST_REF_UPDATES, &reference_updates) == 0) { if (frame_info.pictureid_present && ((reference_updates & VP8_GOLD_FRAME) || (reference_updates & VP8_ALTR_FRAME))) { vp8rtpfmt_send_rpsi(&s->unpacker, frame_info.pictureid); } } if (s->yuv_width != img->d_w || s->yuv_height != img->d_h) { if (s->yuv_msg) freemsg(s->yuv_msg); s->yuv_msg = ms_yuv_buf_alloc(&s->outbuf, img->d_w, img->d_h); ms_message("MSVp8Dec: video is %ix%i", img->d_w, img->d_h); s->yuv_width = img->d_w; s->yuv_height = img->d_h; ms_filter_notify_no_arg(f, MS_FILTER_OUTPUT_FMT_CHANGED); } /* scale/copy frame to destination mblk_t */ for (i = 0; i < 3; i++) { uint8_t *dest = s->outbuf.planes[i]; uint8_t *src = img->planes[i]; int h = img->d_h >> ((i > 0) ? 
1 : 0); for (j = 0; j < h; j++) { memcpy(dest, src, s->outbuf.strides[i]); dest += s->outbuf.strides[i]; src += img->stride[i]; } } ms_queue_put(f->outputs[0], dupmsg(s->yuv_msg)); ms_average_fps_update(&s->fps, (uint32_t)f->ticker->time); if (!s->first_image_decoded) { s->first_image_decoded = TRUE; ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED); } } while ((im = ms_queue_get(&mtofree_queue)) != NULL) { freemsg(im); } }
/*
 * Encoder filter tick: encode at most the most recent queued picture,
 * maintaining reference-frame bookkeeping for AVPF, packetize the encoded
 * data into VP8 RTP payload-descriptor packets, and advance the pictureID.
 * All queued input is flushed at the end of every tick.
 */
static void enc_process(MSFilter *f) {
	mblk_t *im;
	uint64_t timems = f->ticker->time;
	/* RTP timestamp at the 90 kHz video clock. */
	uint32_t timestamp = (uint32_t)(timems*90);
	EncState *s = (EncState *)f->data;
	unsigned int flags = 0;
	vpx_codec_err_t err;
	MSPicture yuv;
	bool_t is_ref_frame=FALSE;

	ms_filter_lock(f);

#ifdef AVPF_DEBUG
	ms_message("VP8 enc_process:");
#endif

	if (!s->ready) {
		ms_queue_flush(f->inputs[0]);
		ms_filter_unlock(f);
		return;
	}

	/* Only the most recent queued frame is encoded; older ones are dropped
	 * by the flush at the end. */
	if ((im = ms_queue_peek_last(f->inputs[0])) != NULL) {
		vpx_image_t img;

		flags = 0;
		ms_yuv_buf_init_from_mblk(&yuv, im);
		/* Wrap the mblk_t planes without copying. */
		vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width, s->vconf.vsize.height, 1, yuv.planes[0]);

		/* Without AVPF, the video starter decides when to force keyframes. */
		if ((s->avpf_enabled != TRUE) && ms_video_starter_need_i_frame(&s->starter, f->ticker->time)) {
			s->force_keyframe = TRUE;
		}
		if (s->force_keyframe == TRUE) {
			ms_message("Forcing vp8 key frame for filter [%p]", f);
			flags = VPX_EFLAG_FORCE_KF;
		} else if (s->avpf_enabled == TRUE) {
			if (s->frame_count == 0) s->force_keyframe = TRUE;
			enc_fill_encoder_flags(s, &flags);
		}

#ifdef AVPF_DEBUG
		ms_message("VP8 encoder frames state:");
		ms_message("\tgolden: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
			s->frames_state.golden.count, s->frames_state.golden.picture_id,
			(s->frames_state.golden.acknowledged == TRUE) ? "Y" : "N");
		ms_message("\taltref: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
			s->frames_state.altref.count, s->frames_state.altref.picture_id,
			(s->frames_state.altref.acknowledged == TRUE) ? "Y" : "N");
#endif
		err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags, 1000000LL/(2*(int)s->vconf.fps)); /*encoder has half a framerate interval to encode*/
		if (err) {
			ms_error("vpx_codec_encode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
		} else {
			vpx_codec_iter_t iter = NULL;
			const vpx_codec_cx_pkt_t *pkt;
			MSList *list = NULL;

			/* Update the frames state. */
			is_ref_frame=FALSE;
			if (flags & VPX_EFLAG_FORCE_KF) {
				/* A keyframe refreshes every reference buffer. */
				enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
				enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
				s->frames_state.golden.is_independant=TRUE;
				s->frames_state.altref.is_independant=TRUE;
				s->frames_state.last_independent_frame=s->frame_count;
				s->force_keyframe = FALSE;
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_FORCE_GF) {
				enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_FORCE_ARF) {
				enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
				is_ref_frame=TRUE;
			}else if (flags & VP8_EFLAG_NO_REF_LAST) {
				enc_mark_reference_frame_as_sent(s, VP8_LAST_FRAME);
				is_ref_frame=is_reconstruction_frame_sane(s,flags);
			}
			if (is_frame_independent(flags)){
				s->frames_state.last_independent_frame=s->frame_count;
			}

			/* Pack the encoded frame. */
			while( (pkt = vpx_codec_get_cx_data(&s->codec, &iter)) ) {
				if ((pkt->kind == VPX_CODEC_CX_FRAME_PKT) && (pkt->data.frame.sz > 0)) {
					Vp8RtpFmtPacket *packet = ms_new0(Vp8RtpFmtPacket, 1);

					packet->m = allocb(pkt->data.frame.sz, 0);
					memcpy(packet->m->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
					packet->m->b_wptr += pkt->data.frame.sz;
					mblk_set_timestamp_info(packet->m, timestamp);
					packet->pd = ms_new0(Vp8RtpFmtPayloadDescriptor, 1);
					packet->pd->start_of_partition = TRUE;
					packet->pd->non_reference_frame = s->avpf_enabled && !is_ref_frame;
					if (s->avpf_enabled == TRUE) {
						packet->pd->extended_control_bits_present = TRUE;
						packet->pd->pictureid_present = TRUE;
						packet->pd->pictureid = s->picture_id;
					} else {
						packet->pd->extended_control_bits_present = FALSE;
						packet->pd->pictureid_present = FALSE;
					}
					if (s->flags & VPX_CODEC_USE_OUTPUT_PARTITION) {
						/* One packet per partition; the marker goes on the
						 * last (non-fragment) packet of the frame. */
						packet->pd->pid = (uint8_t)pkt->data.frame.partition_id;
						if (!(pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT)) {
							mblk_set_marker_info(packet->m, TRUE);
						}
					} else {
						packet->pd->pid = 0;
						mblk_set_marker_info(packet->m, TRUE);
					}
					list = ms_list_append(list, packet);
				}
			}

#ifdef AVPF_DEBUG
			ms_message("VP8 encoder picture_id=%i ***| %s | %s | %s | %s", (int)s->picture_id,
				(flags & VPX_EFLAG_FORCE_KF) ? "KF " : (flags & VP8_EFLAG_FORCE_GF) ? "GF " : (flags & VP8_EFLAG_FORCE_ARF) ? "ARF" : " ",
				(flags & VP8_EFLAG_NO_REF_GF) ? "NOREFGF" : " ",
				(flags & VP8_EFLAG_NO_REF_ARF) ? "NOREFARF" : " ",
				(flags & VP8_EFLAG_NO_REF_LAST) ? "NOREFLAST" : " ");
#endif

			vp8rtpfmt_packer_process(&s->packer, list, f->outputs[0], f->factory);

			/* Handle video starter if AVPF is not enabled. */
			s->frame_count++;
			if ((s->avpf_enabled != TRUE) && (s->frame_count == 1)) {
				ms_video_starter_first_frame(&s->starter, f->ticker->time);
			}

			/* Increment the pictureID. */
			s->picture_id++;
#ifdef PICTURE_ID_ON_16_BITS
			/* 15-bit pictureID: keep the extension bit set after wrap. */
			if (s->picture_id == 0)
				s->picture_id = 0x8000;
#else
			/* 7-bit pictureID wraps at 0x80. */
			if (s->picture_id == 0x0080)
				s->picture_id = 0;
#endif
		}
	}

	ms_filter_unlock(f);
	ms_queue_flush(f->inputs[0]);
}
/*
 * Decoder filter tick: unpacketize each incoming RTP payload, decode every
 * reassembled VP8 unit, and emit one YUV buffer per decoded image.
 * Notifies MS_VIDEO_DECODER_DECODING_ERRORS (rate-limited to once per 5 s)
 * on decode failures and MS_VIDEO_DECODER_FIRST_IMAGE_DECODED once.
 */
static void dec_process(MSFilter *f) {
	mblk_t *im;
	DecState *s=(DecState*)f->data;

	while( (im=ms_queue_get(f->inputs[0]))!=0) {
		mblk_t *m;

		dec_unpacketize(f, s, im, &s->q);

		while((m=ms_queue_get(&s->q))!=NULL){
			vpx_codec_err_t err;
			vpx_codec_iter_t iter = NULL;
			vpx_image_t *img;

			err = vpx_codec_decode(&s->codec, m->b_rptr, m->b_wptr - m->b_rptr, NULL, 0);
			if (err) {
				ms_warning("vpx_codec_decode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));

				/* Rate-limit the error notification to once per 5 seconds. */
				if ((f->ticker->time - s->last_error_reported_time)>5000 || s->last_error_reported_time==0) {
					s->last_error_reported_time=f->ticker->time;
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
				}
				if (s->first_image_decoded == FALSE) {
					/* if no frames have been decoded yet, do not try to browse decoded frames */
					freemsg(m);
					continue;
				}
				/* NOTE(review): after the first image, decoding errors fall
				 * through and decoded frames are still browsed -- this looks
				 * intentional (salvage what the decoder produced). */
			}

			/* browse decoded frames */
			while((img = vpx_codec_get_frame(&s->codec, &iter))) {
				int i,j;

				/* (Re)allocate the output YUV buffer on size change. */
				if (s->yuv_width != img->d_w || s->yuv_height != img->d_h) {
					if (s->yuv_msg) freemsg(s->yuv_msg);
					s->yuv_msg = ms_yuv_buf_alloc(&s->outbuf, img->d_w, img->d_h);
					s->yuv_width = img->d_w;
					s->yuv_height = img->d_h;
				}

				/* scale/copy frame to destination mblk_t */
				for(i=0; i<3; i++) {
					uint8_t* dest = s->outbuf.planes[i];
					uint8_t* src = img->planes[i];
					/* Chroma planes (i>0) have half the luma height. */
					int h = img->d_h >> ((i>0)?1:0);

					for(j=0; j<h; j++) {
						memcpy(dest, src, s->outbuf.strides[i]);
						dest += s->outbuf.strides[i];
						src += img->stride[i];
					}
				}
				ms_queue_put(f->outputs[0], dupmsg(s->yuv_msg));

				if (ms_video_update_average_fps(&s->fps, f->ticker->time)) {
					ms_message("VP8 decoder: Frame size: %dx%d", s->yuv_width, s->yuv_height);
				}
				if (!s->first_image_decoded) {
					s->first_image_decoded = TRUE;
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
				}
			}
			freemsg(m);
		}
	}
}
/* VLC module descriptor tail: encoder submodule registration.
 * NOTE(review): the matching vlc_module_begin()/#if for the #endif below is
 * outside this view. */
set_shortname("vpx")
set_capability("encoder", 60)
set_description(N_("WebM video encoder"))
set_callbacks(OpenEncoder, CloseEncoder)
# define ENC_CFG_PREFIX "sout-vpx-"
add_integer( ENC_CFG_PREFIX "quality-mode", VPX_DL_GOOD_QUALITY, QUALITY_MODE_TEXT,
             QUALITY_MODE_LONGTEXT, true )
    change_integer_range( 0, 2 )
#endif
vlc_module_end ()

/*
 * Log a libvpx error through the VLC message subsystem.
 * `msg` is used as the printf format and must contain exactly two %s
 * conversions (error, detail) -- the VPX_ERR macro below appends them.
 */
static void vpx_err_msg(vlc_object_t *this, struct vpx_codec_ctx *ctx,
                        const char *msg)
{
    const char *error = vpx_codec_error(ctx);
    const char *detail = vpx_codec_error_detail(ctx);
    if (!detail)
        detail = "no specific information";
    msg_Err(this, msg, error, detail);
}

#define VPX_ERR(this, ctx, msg) vpx_err_msg(VLC_OBJECT(this), ctx, msg ": %s (%s)")

/*****************************************************************************
 * decoder_sys_t: libvpx decoder descriptor
 *****************************************************************************/
struct decoder_sys_t
{
    struct vpx_codec_ctx ctx;
};
/*
 * Encode one source frame (or flush the encoder when sourceFrame is NULL,
 * which signals the termination of a pass), then drain all output packets:
 * compressed frames are emitted, two-pass stats packets are accumulated in
 * glob->stats.  Returns noErr on success or a ComponentResult error code.
 */
ComponentResult encodeThisSourceFrame(VP8EncoderGlobals glob,
                                      ICMCompressorSourceFrameRef sourceFrame)
{
  vpx_codec_err_t codecError;
  ComponentResult err = noErr;

  //time is multiplied by 2 to allow space for altref frames
  UInt32 time2 = glob->frameCount * 2;
  dbg_printf("[vp8e - %08lx] encode this frame %08lx %ld time2 %lu\n", (UInt32)glob, (UInt32)sourceFrame, glob->frameCount, time2);
  //long dispNumber = ICMCompressorSourceFrameGetDisplayNumber(sourceFrame);

  // Initialize codec if needed
  initializeCodec(glob, sourceFrame);

  /////// Transfer the current frame to glob->raw
  if (sourceFrame != NULL)
  {
    if (glob->currentPass != VPX_RC_FIRST_PASS)
      addSourceFrame(glob,sourceFrame);

    err = convertColorSpace(glob, sourceFrame);
    if (err) goto bail;

    int flags = 0 ; //TODO - find out what I may need in these flags
    dbg_printf("[vp8e - %08lx] vpx_codec_encode codec %x raw %x framecount %d flags %x\n", (UInt32)glob, glob->codec, glob->raw, glob->frameCount, flags);
    //TODO seems like quality should be an option. Right now hardcoded to GOOD_QUALITY
    codecError = vpx_codec_encode(glob->codec, glob->raw, time2, 1, flags, VPX_DL_GOOD_QUALITY);
    dbg_printf("[vp8e - %08lx] vpx_codec_encode codec exit\n", (UInt32)glob);
  }
  else //sourceFrame is Null. this could be termination of a pass
  {
    int flags = 0 ; //TODO - find out what I may need in these flags
    dbg_printf("[vp8e - %08lx] vpx_codec_encode codec %x raw %x framecount %d ----NULL TERMINATION\n", (UInt32)glob, glob->codec, NULL, glob->frameCount, flags);
    codecError = vpx_codec_encode(glob->codec, NULL, time2, 1, flags, VPX_DL_GOOD_QUALITY);
  }

  glob->frameCount++ ; //framecount gets reset on a new pass

  if (codecError)
  {
    const char *detail = vpx_codec_error_detail(glob->codec);
    dbg_printf("[vp8e - %08lx] error vpx encode is %s\n", (UInt32)glob, vpx_codec_error(glob->codec));
    if (detail) dbg_printf(" %s\n", detail);
    goto bail;
  }

  /* Drain every pending output packet from the encoder. */
  vpx_codec_iter_t iter = NULL;
  int got_data = 0; /* NOTE(review): counted but never read -- confirm needed */
  while (1)
  {
    const vpx_codec_cx_pkt_t *pkt = vpx_codec_get_cx_data(glob->codec, &iter);
    if (pkt == NULL)
      break;
    got_data ++;
    switch (pkt->kind)
    {
      case VPX_CODEC_CX_FRAME_PKT:
        err = emitEncodedFrame(glob, pkt);
        if (err) goto bail;
        break;
      case VPX_CODEC_STATS_PKT:
        if (1)
        {
          unsigned long newSize = glob->stats.sz + pkt->data.twopass_stats.sz;
          /* Fix: realloc's result must not overwrite the only pointer to the
           * stats buffer -- on failure the old buffer leaked and stats.sz
           * went stale.  Keep the old buffer until realloc succeeds. */
          void *newBuf = realloc(glob->stats.buf, newSize);
          if (!newBuf)
          {
            free(glob->stats.buf);
            glob->stats.buf = NULL;
            glob->stats.sz = 0;
            return mFulErr;
          }
          glob->stats.buf = newBuf;
          dbg_printf("[vp8e - %08lx] Reallocation buffer size to %ld\n", (UInt32)glob, newSize);
          memcpy((char*)glob->stats.buf + glob->stats.sz, pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);
          glob->stats.sz = newSize;
        }
        break;
      default:
        break;
    }
  }

  if (glob->currentPass == VPX_RC_FIRST_PASS)
  {
    //in the first pass no need to export any frames
    return err;
  }

bail:
  if (err)
    dbg_printf("[vp8e - %08lx] bailed with err %d\n", (UInt32)glob, err);

  return err;
}
/* mediastreamer2 filter process callback: pull raw YUV frames from the
 * filter's input queue, encode each with libvpx under the realtime
 * deadline, and hand the encoded packets to vp8_fragment_and_send().
 * The whole drain of the input queue runs under the filter lock. */
static void enc_process(MSFilter *f) {
	mblk_t *im,*om;
	uint64_t timems=f->ticker->time;   /* ticker time, milliseconds */
	uint32_t timestamp=timems*90;      /* RTP timestamp on the 90 kHz video clock */
	EncState *s=(EncState*)f->data;
	unsigned int flags = 0;
	vpx_codec_err_t err;
	YuvBuf yuv;
	ms_filter_lock(f);
	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		vpx_image_t img;
		om = NULL;
		flags = 0;
		/* Wrap the mblk's I420 planes in a vpx_image_t without copying. */
		ms_yuv_buf_init_from_mblk(&yuv, im);
		vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width, s->vconf.vsize.height, 1, yuv.planes[0]);
		if (video_starter_need_i_frame (&s->starter,f->ticker->time)){
			/* Sends an I-frame at 2 seconds and 4 seconds after the beginning of the call. */
			s->req_vfu=TRUE;
		}
		if (s->req_vfu){
			/* Honor a pending "video fast update" request by forcing a key frame. */
			flags = VPX_EFLAG_FORCE_KF;
			s->req_vfu=FALSE;
		}
		err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags, VPX_DL_REALTIME);
		if (err) {
			ms_error("vpx_codec_encode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
		} else {
			vpx_codec_iter_t iter = NULL;
			const vpx_codec_cx_pkt_t *pkt;
			s->frame_count++;
			if (s->frame_count==1){
				video_starter_first_frame (&s->starter,f->ticker->time);
			}
			/* Drain every output packet the encoder produced for this frame. */
			while( (pkt = vpx_codec_get_cx_data(&s->codec, &iter)) ) {
				if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
					if (pkt->data.frame.sz > 0) {
						/* Copy the encoded data into a fresh message block.
						 * NOTE(review): allocb() result is not NULL-checked here —
						 * presumably it cannot fail / aborts internally; verify
						 * against allocb's contract. */
						om = allocb(pkt->data.frame.sz,0);
						memcpy(om->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
						om->b_wptr += pkt->data.frame.sz;
#ifdef FRAGMENT_ON_PARTITIONS
						vp8_fragment_and_send(f, s, om, timestamp, pkt, (pkt->data.frame.partition_id == s->token_partition_count));
#else
						vp8_fragment_and_send(f, s, om, timestamp, pkt, 1);
#endif
					}
				}
			}
		}
		freemsg(im);
	}
	ms_filter_unlock(f);
}
/*
 * Decode one inbound VP8/VP9 payload chunk. Packets are buffered until a
 * complete picture is available (frame->m set), then decoded; on success
 * frame->img points to the decoded picture. Key-frame bookkeeping gates
 * decoding until the first key frame arrives; decode failures restart the
 * wait and request a new key frame via SFF_WAIT_KEY_FRAME.
 *
 * Returns SWITCH_STATUS_SUCCESS with a picture, SWITCH_STATUS_MORE_DATA
 * when more packets are needed, or SWITCH_STATUS_FALSE if the decoder
 * could not be initialized.
 */
static switch_status_t switch_vpx_decode(switch_codec_t *codec, switch_frame_t *frame)
{
	vpx_context_t *context = (vpx_context_t *)codec->private_info;
	switch_size_t len;
	vpx_codec_ctx_t *decoder = NULL;
	switch_status_t status = SWITCH_STATUS_SUCCESS;
	int is_start = 0, is_keyframe = 0;

	/* Classify the payload: picture start and/or key frame. */
	if (context->is_vp9) {
		is_start = is_keyframe = IS_VP9_KEY_FRAME(*(unsigned char *)frame->data);
	} else { /* vp8 */
		is_start = (*(unsigned char *)frame->data & 0x10);
		is_keyframe = IS_VP8_KEY_FRAME((uint8_t *)frame->data);
	}

	/* A pending reset tears the decoder down and rebuilds it. */
	if (context->need_decoder_reset != 0) {
		vpx_codec_destroy(&context->decoder);
		context->decoder_init = 0;
		init_decoder(codec);
		context->need_decoder_reset = 0;
	}

	if (!context->decoder_init) {
		init_decoder(codec);
	}

	if (!context->decoder_init) {
		switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_WARNING, "VPX decoder is not initialized!\n");
		return SWITCH_STATUS_FALSE;
	}

	decoder = &context->decoder;

	context->last_received_complete_picture = frame->m ? SWITCH_TRUE : SWITCH_FALSE;

	if (is_start) {
		context->got_start_frame = 1;
	}

	/* Key-frame bookkeeping: positive counts frames since the first key
	 * frame; zero/negative means we are still waiting for one. */
	if (is_keyframe) {
		if (context->got_key_frame <= 0) {
			context->got_key_frame = 1;
			/* NOTE(review): the original code had an unreachable
			 * "if (!is_keyframe) get_refresh = 1;" here — always false inside
			 * this branch, so get_refresh could never become nonzero. The dead
			 * branch and the variable were removed (behavior unchanged). If a
			 * refresh request was intended under some other condition, it must
			 * be re-added with the correct predicate. */
		} else {
			context->got_key_frame++;
		}
	} else if (context->got_key_frame <= 0) {
		/* Still waiting for the first key frame; log every 200 frames. */
		if ((--context->got_key_frame % 200) == 0) {
			switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_DEBUG1, "Waiting for key frame %d\n", context->got_key_frame);
		}
		if (!context->got_start_frame) {
			switch_goto_status(SWITCH_STATUS_MORE_DATA, end);
		}
	}

	/* Accumulate this payload into the packet buffer. */
	status = context->is_vp9 ? buffer_vp9_packets(context, frame) : buffer_vp8_packets(context, frame);

	/* A previous decode may still have pictures queued; hand out the next. */
	if (context->dec_iter && (frame->img = (switch_image_t *) vpx_codec_get_frame(decoder, &context->dec_iter))) {
		switch_goto_status(SWITCH_STATUS_SUCCESS, end);
	}

	len = switch_buffer_inuse(context->vpx_packet_buffer);

	/* Decode only when buffering succeeded and the picture is complete
	 * (RTP marker bit set) with data available. */
	if (status == SWITCH_STATUS_SUCCESS && frame->m && len) {
		uint8_t *data;
		int corrupted = 0;
		int err;

		switch_buffer_peek_zerocopy(context->vpx_packet_buffer, (void *)&data);
		context->dec_iter = NULL;
		err = vpx_codec_decode(decoder, data, (unsigned int)len, NULL, 0);

		if (err != VPX_CODEC_OK) {
			switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_DEBUG1, "Error decoding %" SWITCH_SIZE_T_FMT " bytes, [%d:%s:%s]\n",
							  len, err, vpx_codec_error(decoder), vpx_codec_error_detail(decoder));
			switch_goto_status(SWITCH_STATUS_RESTART, end);
		}

		if (vpx_codec_control(decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted) != VPX_CODEC_OK) {
			switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_WARNING, "VPX control error!\n");
			switch_goto_status(SWITCH_STATUS_RESTART, end);
		}

		/* Discard corrupted pictures rather than displaying artifacts. */
		if (corrupted) {
			frame->img = NULL;
		} else {
			frame->img = (switch_image_t *) vpx_codec_get_frame(decoder, &context->dec_iter);
		}

		switch_buffer_zero(context->vpx_packet_buffer);

		if (!frame->img) {
			context->got_key_frame = 0;
			context->got_start_frame = 0;
			status = SWITCH_STATUS_RESTART;
		}
	}

end:

	if (status == SWITCH_STATUS_RESTART) {
		/* Restart: flush buffered packets and wait for a fresh key frame. */
		switch_buffer_zero(context->vpx_packet_buffer);
		context->got_key_frame = 0;
		context->got_start_frame = 0;
	}

	if (!frame->img || status == SWITCH_STATUS_RESTART) {
		status = SWITCH_STATUS_MORE_DATA;
	}

	if (context->got_key_frame <= 0) {
		switch_set_flag(frame, SFF_WAIT_KEY_FRAME);
	}

	return status;
}
/* Encode frame->img with libvpx (realtime deadline) and hand out the
 * encoded partitions via consume_partition(). Handles lazy encoder init,
 * resolution changes, bandwidth changes and rate-limited forced key
 * frames before encoding. Returns SWITCH_STATUS_FALSE on encode error. */
static switch_status_t switch_vpx_encode(switch_codec_t *codec, switch_frame_t *frame)
{
	vpx_context_t *context = (vpx_context_t *)codec->private_info;
	int width = 0;
	int height = 0;
	uint32_t dur;
	int64_t pts;
	vpx_enc_frame_flags_t vpx_flags = 0;
	switch_time_t now;
	int err;

	/* Same image re-sent: just deliver the next buffered partition. */
	if (frame->flags & SFF_SAME_IMAGE) {
		return consume_partition(context, frame);
	}

	if (context->need_encoder_reset != 0) {
		reset_codec_encoder(codec);
		context->need_encoder_reset = 0;
	}

	/* Prefer display dimensions when set; fall back to the stored size.
	 * NOTE(review): frame->img is dereferenced without a NULL check —
	 * presumably callers guarantee an image here; confirm upstream. */
	if (frame->img->d_h > 1) {
		width = frame->img->d_w;
		height = frame->img->d_h;
	} else {
		width = frame->img->w;
		height = frame->img->h;
	}

	/* Resolution changed: reconfigure the encoder, flag a picture reset
	 * and request a key frame so the stream restarts cleanly. */
	if (context->config.g_w != width || context->config.g_h != height) {
		context->codec_settings.video.width = width;
		context->codec_settings.video.height = height;
		reset_codec_encoder(codec);
		frame->flags |= SFF_PICTURE_RESET;
		context->need_key_frame = 1;
	}

	if (!context->encoder_init) {
		init_encoder(codec);
	}

	/* Apply a pending bandwidth change by re-initializing the encoder. */
	if (context->change_bandwidth) {
		context->codec_settings.video.bandwidth = context->change_bandwidth;
		context->change_bandwidth = 0;
		init_encoder(codec);
	}

	now = switch_time_now();

	if (context->need_key_frame != 0) {
		/* Force generate a key frame, rate-limited to KEY_FRAME_MIN_FREQ. */
		if (!context->last_key_frame || (now - context->last_key_frame) > KEY_FRAME_MIN_FREQ) {
			switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_DEBUG1, "VPX KEYFRAME GENERATED\n");
			vpx_flags |= VPX_EFLAG_FORCE_KF;
			context->need_key_frame = 0;
			context->last_key_frame = now;
		}
	}

	context->framecount++;

	/* pts: elapsed time since encoder start; dur: time since the previous
	 * encode, falling back to pts for the very first frame. */
	pts = (now - context->start_time) / 1000;
	dur = context->last_ms ? (now - context->last_ms) / 1000 : pts;

	if ((err = vpx_codec_encode(&context->encoder, (vpx_image_t *) frame->img, pts, dur, vpx_flags, VPX_DL_REALTIME)) != VPX_CODEC_OK) {
		switch_log_printf(SWITCH_CHANNEL_LOG, SWITCH_LOG_ERROR, "VPX encode error %d:%s:%s\n",
						  err, vpx_codec_error(&context->encoder), vpx_codec_error_detail(&context->encoder));
		frame->datalen = 0;
		return SWITCH_STATUS_FALSE;
	}

	context->enc_iter = NULL;
	context->last_ts = frame->timestamp;
	context->last_ms = now;

	return consume_partition(context, frame);
}
/* Prepare the VP8 encoder before the filter starts: build the libvpx
 * encoder configuration, create the codec instance, then apply runtime
 * controls. On any libvpx error this logs and returns without setting
 * s->ready. */
static void enc_preprocess(MSFilter *f) {
	EncState *s = (EncState *)f->data;
	vpx_codec_err_t status;
	vpx_codec_caps_t codec_caps;
	int cpu_tradeoff = 0;

	/* Build the encoder configuration. */
	s->flags = 0;
	codec_caps = vpx_codec_get_caps(s->iface);
	if ((s->avpf_enabled == TRUE) && (codec_caps & VPX_CODEC_CAP_OUTPUT_PARTITION)) {
		s->flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
	}

	status = vpx_codec_enc_config_default(s->iface, &s->cfg, 0);
	if (status) {
		ms_error("Failed to get config: %s", vpx_codec_err_to_string(status));
		return;
	}

	/* The 0.92 factor accounts for average IP/UDP/RTP overhead. */
	s->cfg.rc_target_bitrate = (unsigned int)(((float)s->vconf.required_bitrate) * 0.92f / 1024.0f);
	s->cfg.g_pass = VPX_RC_ONE_PASS;
	s->cfg.g_timebase.num = 1;
	s->cfg.g_timebase.den = (int)s->vconf.fps;
	s->cfg.rc_end_usage = VPX_CBR; /* constant bitrate mode */
	if (s->avpf_enabled == TRUE) {
		/* With AVPF, key frames are produced on explicit request only. */
		s->cfg.kf_mode = VPX_KF_DISABLED;
	} else {
		/* Encoder places key frames automatically, at most one per 10 s. */
		s->cfg.kf_mode = VPX_KF_AUTO;
		s->cfg.kf_max_dist = 10 * s->cfg.g_timebase.den;
	}
#if TARGET_IPHONE_SIMULATOR
	s->cfg.g_threads = 1; /* workaround for a crash on the iPad simulator */
#else
	s->cfg.g_threads = ms_factory_get_cpu_count(f->factory);
#endif
	ms_message("VP8 g_threads=%d", s->cfg.g_threads);
	s->cfg.rc_undershoot_pct = 95;
	s->cfg.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT|VPX_ERROR_RESILIENT_PARTITIONS;
	s->cfg.g_lag_in_frames = 0;
#if defined(ANDROID) || (TARGET_OS_IPHONE == 1) || defined(__arm__) || defined(_M_ARM)
	/* CPU/quality trade-off: larger positive values lower CPU usage at the
	 * expense of quality; values below 7 consume too much CPU on these
	 * targets. */
	cpu_tradeoff = 10 - s->cfg.g_threads;
	if (cpu_tradeoff < 7) {
		cpu_tradeoff = 7;
	}
	if (s->cfg.g_threads == 1) {
		/* Mono-core iOS devices: reduce quality further, VP8 being slower
		 * with newer Clang compilers. */
		cpu_tradeoff = 16;
	}
#endif
	s->cfg.g_w = s->vconf.vsize.width;
	s->cfg.g_h = s->vconf.vsize.height;

	/* Create the codec instance. */
	status = vpx_codec_enc_init(&s->codec, s->iface, &s->cfg, s->flags);
	if (status) {
		ms_error("vpx_codec_enc_init failed: %s (%s)", vpx_codec_err_to_string(status), vpx_codec_error_detail(&s->codec));
		return;
	}

	/* Runtime controls. */
	vpx_codec_control(&s->codec, VP8E_SET_CPUUSED, cpu_tradeoff);
	vpx_codec_control(&s->codec, VP8E_SET_STATIC_THRESHOLD, 0);
	vpx_codec_control(&s->codec, VP8E_SET_ENABLEAUTOALTREF, !s->avpf_enabled);
	vpx_codec_control(&s->codec, VP8E_SET_MAX_INTRA_BITRATE_PCT, 400); /* cap I-frame size to ~4 P-frames */
	if (s->flags & VPX_CODEC_USE_OUTPUT_PARTITION) {
		vpx_codec_control(&s->codec, VP8E_SET_TOKEN_PARTITIONS, 2); /* 4 partitions per frame */
	} else {
		vpx_codec_control(&s->codec, VP8E_SET_TOKEN_PARTITIONS, 0);
	}

	s->invalid_frame_reported = FALSE;
	vp8rtpfmt_packer_init(&s->packer);
	if (s->avpf_enabled == TRUE) {
		s->force_keyframe = TRUE;
	} else if (s->frame_count == 0) {
		ms_video_starter_init(&s->starter);
	}
	s->ready = TRUE;
}