/* Decode a compressed video file (IVF, WebM, or raw) with libvpx and emit the
 * frames as raw YUV / Y4M to a file (or stdout), or as per-frame / whole-stream
 * MD5 digests.  Returns EXIT_SUCCESS, or EXIT_FAILURE on error or if any
 * decoded frame was reported corrupted.
 *
 * argc/argv_ are the raw process arguments; argv_[0] is kept as exec_name for
 * diagnostics.  NOTE(review): relies on file-scope option descriptors
 * (codecarg, outputfile, ...) and helpers (read_frame, open_outfile, ...)
 * declared elsewhere in this file.
 */
int main_loop(int argc, const char **argv_) {
  vpx_codec_ctx_t decoder;
  char *fn = NULL;
  int i;
  uint8_t *buf = NULL;                      /* compressed-frame buffer, grown by read_frame() */
  size_t bytes_in_buffer = 0, buffer_size = 0;
  FILE *infile;
  int frame_in = 0, frame_out = 0, flipuv = 0, noblit = 0;
  int do_md5 = 0, progress = 0;
  int stop_after = 0, postproc = 0, summary = 0, quiet = 1;
  int arg_skip = 0;
  int ec_enabled = 0;
  const VpxInterface *interface = NULL;     /* decoder chosen via --codec */
  const VpxInterface *fourcc_interface = NULL; /* decoder implied by file header */
  uint64_t dx_time = 0;                     /* accumulated decode time (usec) */
  struct arg arg;
  char **argv, **argi, **argj;
  int single_file;
  int use_y4m = 1;                          /* Y4M output is the default container */
  vpx_codec_dec_cfg_t cfg = {0};
#if CONFIG_VP8_DECODER
  vp8_postproc_cfg_t vp8_pp_cfg = {0};
  int vp8_dbg_color_ref_frame = 0;
  int vp8_dbg_color_mb_modes = 0;
  int vp8_dbg_color_b_modes = 0;
  int vp8_dbg_display_mv = 0;
#endif
  int frames_corrupted = 0;
  int dec_flags = 0;
  int do_scale = 0;
  vpx_image_t *scaled_img = NULL;           /* lazily allocated on first frame when --scale */
  int frame_avail, got_data;
  int num_external_frame_buffers = 0;
  struct ExternalFrameBufferList ext_fb_list = {0};
  const char *outfile_pattern = NULL;
  char outfile_name[PATH_MAX] = {0};
  FILE *outfile = NULL;
  MD5Context md5_ctx;
  unsigned char md5_digest[16];
  struct VpxDecInputContext input = {0};
  struct VpxInputContext vpx_input_ctx = {0};
  struct WebmInputContext webm_ctx = {0};
  input.vpx_input_ctx = &vpx_input_ctx;
  input.webm_ctx = &webm_ctx;

  /* Parse command line */
  exec_name = argv_[0];
  argv = argv_dup(argc - 1, argv_ + 1);

  /* Recognized options are consumed; anything else is compacted to the front
   * of argv via argj for the "unrecognized option" / filename pass below. */
  for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) {
    memset(&arg, 0, sizeof(arg));
    arg.argv_step = 1;
    if (arg_match(&arg, &codecarg, argi)) {
      interface = get_vpx_decoder_by_name(arg.val);
      if (!interface)
        die("Error: Unrecognized argument (%s) to --codec\n", arg.val);
    } else if (arg_match(&arg, &looparg, argi)) {
      // no-op: --loop is handled by the caller, just swallow it here
    } else if (arg_match(&arg, &outputfile, argi))
      outfile_pattern = arg.val;
    else if (arg_match(&arg, &use_yv12, argi)) {
      use_y4m = 0;
      flipuv = 1;
    } else if (arg_match(&arg, &use_i420, argi)) {
      use_y4m = 0;
      flipuv = 0;
    } else if (arg_match(&arg, &flipuvarg, argi))
      flipuv = 1;
    else if (arg_match(&arg, &noblitarg, argi))
      noblit = 1;
    else if (arg_match(&arg, &progressarg, argi))
      progress = 1;
    else if (arg_match(&arg, &limitarg, argi))
      stop_after = arg_parse_uint(&arg);
    else if (arg_match(&arg, &skiparg, argi))
      arg_skip = arg_parse_uint(&arg);
    else if (arg_match(&arg, &postprocarg, argi))
      postproc = 1;
    else if (arg_match(&arg, &md5arg, argi))
      do_md5 = 1;
    else if (arg_match(&arg, &summaryarg, argi))
      summary = 1;
    else if (arg_match(&arg, &threadsarg, argi))
      cfg.threads = arg_parse_uint(&arg);
    else if (arg_match(&arg, &verbosearg, argi))
      quiet = 0;
    else if (arg_match(&arg, &scalearg, argi))
      do_scale = 1;
    else if (arg_match(&arg, &fb_arg, argi))
      num_external_frame_buffers = arg_parse_uint(&arg);
#if CONFIG_VP8_DECODER
    else if (arg_match(&arg, &addnoise_level, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_ADDNOISE;
      vp8_pp_cfg.noise_level = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &demacroblock_level, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_DEMACROBLOCK;
      vp8_pp_cfg.deblocking_level = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &deblock, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_DEBLOCK;
    } else if (arg_match(&arg, &mfqe, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_MFQE;
    } else if (arg_match(&arg, &pp_debug_info, argi)) {
      unsigned int level = arg_parse_uint(&arg);
      postproc = 1;
      /* Low 3 bits select the debug-info visualization level; clear then set. */
      vp8_pp_cfg.post_proc_flag &= ~0x7;
      if (level) vp8_pp_cfg.post_proc_flag |= level;
    } else if (arg_match(&arg, &pp_disp_ref_frame, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_ref_frame = flags;
      }
    } else if (arg_match(&arg, &pp_disp_mb_modes, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_mb_modes = flags;
      }
    } else if (arg_match(&arg, &pp_disp_b_modes, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_b_modes = flags;
      }
    } else if (arg_match(&arg, &pp_disp_mvs, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_display_mv = flags;
      }
    } else if (arg_match(&arg, &error_concealment, argi)) {
      ec_enabled = 1;
    }
#endif
    else
      argj++;
  }

  /* Check for unrecognized options */
  for (argi = argv; *argi; argi++)
    if (argi[0][0] == '-' && strlen(argi[0]) > 1)
      die("Error: Unrecognized option %s\n", *argi);

  /* Handle non-option arguments */
  fn = argv[0];

  if (!fn) usage_exit();

  /* Open file; "-" means stdin (switched to binary mode for Windows). */
  infile = strcmp(fn, "-") ? fopen(fn, "rb") : set_binary_mode(stdin);

  if (!infile) {
    fprintf(stderr, "Failed to open file '%s'", strcmp(fn, "-") ? fn : "stdin");
    return EXIT_FAILURE;
  }
#if CONFIG_OS_SUPPORT
  /* Make sure we don't dump to the terminal, unless forced to with -o - */
  if (!outfile_pattern && isatty(fileno(stdout)) && !do_md5 && !noblit) {
    fprintf(stderr,
            "Not dumping raw video to your terminal. Use '-o -' to "
            "override.\n");
    return EXIT_FAILURE;
  }
#endif
  input.vpx_input_ctx->file = infile;

  /* Probe the container type: IVF, then (if built in) WebM, then raw. */
  if (file_is_ivf(input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_IVF;
#if CONFIG_WEBM_IO
  else if (file_is_webm(input.webm_ctx, input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_WEBM;
#endif
  else if (file_is_raw(input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_RAW;
  else {
    fprintf(stderr, "Unrecognized input file type.\n");
#if !CONFIG_WEBM_IO
    fprintf(stderr, "vpxdec was built without WebM container support.\n");
#endif
    return EXIT_FAILURE;
  }

  outfile_pattern = outfile_pattern ? outfile_pattern : "-";
  single_file = is_single_file(outfile_pattern);

  if (!noblit && single_file) {
    generate_filename(outfile_pattern, outfile_name, PATH_MAX,
                      vpx_input_ctx.width, vpx_input_ctx.height, 0);
    if (do_md5)
      MD5Init(&md5_ctx);
    else
      outfile = open_outfile(outfile_name);
  }

  if (use_y4m && !noblit) {
    if (!single_file) {
      fprintf(stderr,
              "YUV4MPEG2 not supported with output patterns,"
              " try --i420 or --yv12.\n");
      return EXIT_FAILURE;
    }
#if CONFIG_WEBM_IO
    /* Y4M needs a framerate in its header; WebM does not carry one directly,
     * so estimate it from timestamps. */
    if (vpx_input_ctx.file_type == FILE_TYPE_WEBM) {
      if (webm_guess_framerate(input.webm_ctx, input.vpx_input_ctx)) {
        fprintf(stderr,
                "Failed to guess framerate -- error parsing "
                "webm file?\n");
        return EXIT_FAILURE;
      }
    }
#endif
  }

  /* Prefer the codec indicated by the file header; warn if it disagrees with
   * an explicit --codec.  Fall back to the first registered decoder. */
  fourcc_interface = get_vpx_decoder_by_fourcc(vpx_input_ctx.fourcc);
  if (interface && fourcc_interface && interface != fourcc_interface)
    warn("Header indicates codec: %s\n", fourcc_interface->name);
  else
    interface = fourcc_interface;
  if (!interface) interface = get_vpx_decoder_by_index(0);

  dec_flags = (postproc ? VPX_CODEC_USE_POSTPROC : 0) |
              (ec_enabled ? VPX_CODEC_USE_ERROR_CONCEALMENT : 0);
  if (vpx_codec_dec_init(&decoder, interface->interface(), &cfg, dec_flags)) {
    fprintf(stderr, "Failed to initialize decoder: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (!quiet) fprintf(stderr, "%s\n", decoder.name);

#if CONFIG_VP8_DECODER
  /* Push the VP8-specific postproc / debug-visualization settings gathered
   * during argument parsing into the decoder. */
  if (vp8_pp_cfg.post_proc_flag &&
      vpx_codec_control(&decoder, VP8_SET_POSTPROC, &vp8_pp_cfg)) {
    fprintf(stderr, "Failed to configure postproc: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_ref_frame &&
      vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_REF_FRAME,
                        vp8_dbg_color_ref_frame)) {
    fprintf(stderr, "Failed to configure reference block visualizer: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_mb_modes &&
      vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_MB_MODES,
                        vp8_dbg_color_mb_modes)) {
    fprintf(stderr, "Failed to configure macro block visualizer: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_b_modes &&
      vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_B_MODES,
                        vp8_dbg_color_b_modes)) {
    fprintf(stderr, "Failed to configure block visualizer: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_display_mv &&
      vpx_codec_control(&decoder, VP8_SET_DBG_DISPLAY_MV, vp8_dbg_display_mv)) {
    fprintf(stderr, "Failed to configure motion vector visualizer: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }
#endif

  if (arg_skip) fprintf(stderr, "Skipping first %d frames.\n", arg_skip);
  /* Consume (but don't decode) the frames requested via --skip. */
  while (arg_skip) {
    if (read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) break;
    arg_skip--;
  }

  if (num_external_frame_buffers > 0) {
    /* Exercise the external frame buffer API (VP9): decoder will call back
     * into get_vp9_frame_buffer / release_vp9_frame_buffer. */
    ext_fb_list.num_external_frame_buffers = num_external_frame_buffers;
    ext_fb_list.ext_fb = (struct ExternalFrameBuffer *)calloc(
        num_external_frame_buffers, sizeof(*ext_fb_list.ext_fb));
    if (vpx_codec_set_frame_buffer_functions(&decoder, get_vp9_frame_buffer,
                                             release_vp9_frame_buffer,
                                             &ext_fb_list)) {
      fprintf(stderr, "Failed to configure external frame buffers: %s\n",
              vpx_codec_error(&decoder));
      return EXIT_FAILURE;
    }
  }

  frame_avail = 1;
  got_data = 0;

  /* Decode file */
  while (frame_avail || got_data) {
    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img;
    struct vpx_usec_timer timer;
    int corrupted;

    frame_avail = 0;
    if (!stop_after || frame_in < stop_after) {
      if (!read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) {
        frame_avail = 1;
        frame_in++;

        vpx_usec_timer_start(&timer);

        if (vpx_codec_decode(&decoder, buf, (unsigned int)bytes_in_buffer,
                             NULL, 0)) {
          const char *detail = vpx_codec_error_detail(&decoder);
          warn("Failed to decode frame %d: %s", frame_in,
               vpx_codec_error(&decoder));

          if (detail) warn("Additional information: %s", detail);
          goto fail;
        }

        vpx_usec_timer_mark(&timer);
        dx_time += vpx_usec_timer_elapsed(&timer);
      }
    }

    vpx_usec_timer_start(&timer);

    /* Drain at most one decoded frame per loop iteration; the loop keeps
     * running while either input or buffered output remains. */
    got_data = 0;
    if ((img = vpx_codec_get_frame(&decoder, &iter))) {
      ++frame_out;
      got_data = 1;
    }

    vpx_usec_timer_mark(&timer);
    dx_time += (unsigned int)vpx_usec_timer_elapsed(&timer);

    if (vpx_codec_control(&decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
      warn("Failed VP8_GET_FRAME_CORRUPTED: %s", vpx_codec_error(&decoder));
      goto fail;
    }
    frames_corrupted += corrupted;

    if (progress) show_progress(frame_in, frame_out, dx_time);

    if (!noblit && img) {
      const int PLANES_YUV[] = {VPX_PLANE_Y, VPX_PLANE_U, VPX_PLANE_V};
      const int PLANES_YVU[] = {VPX_PLANE_Y, VPX_PLANE_V, VPX_PLANE_U};
      const int *planes = flipuv ? PLANES_YVU : PLANES_YUV;

      if (do_scale) {
        if (frame_out == 1) {
          // If the output frames are to be scaled to a fixed display size then
          // use the width and height specified in the container. If either of
          // these is set to 0, use the display size set in the first frame
          // header. If that is unavailable, use the raw decoded size of the
          // first decoded frame.
          int display_width = vpx_input_ctx.width;
          int display_height = vpx_input_ctx.height;
          if (!display_width || !display_height) {
            int display_size[2];
            if (vpx_codec_control(&decoder, VP9D_GET_DISPLAY_SIZE,
                                  display_size)) {
              // As last resort use size of first frame as display size.
              display_width = img->d_w;
              display_height = img->d_h;
            } else {
              display_width = display_size[0];
              display_height = display_size[1];
            }
          }
          scaled_img = vpx_img_alloc(NULL, VPX_IMG_FMT_I420, display_width,
                                     display_height, 16);
        }

        if (img->d_w != scaled_img->d_w || img->d_h != scaled_img->d_h) {
          vpx_image_scale(img, scaled_img, kFilterBox);
          img = scaled_img;
        }
      }

      if (single_file) {
        if (use_y4m) {
          char buf[Y4M_BUFFER_SIZE] = {0};
          size_t len = 0;
          if (frame_out == 1) {
            // Y4M file header
            len = y4m_write_file_header(buf, sizeof(buf), vpx_input_ctx.width,
                                        vpx_input_ctx.height,
                                        &vpx_input_ctx.framerate, img->fmt);
            if (do_md5) {
              MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
            } else {
              fputs(buf, outfile);
            }
          }

          // Y4M frame header
          len = y4m_write_frame_header(buf, sizeof(buf));
          if (do_md5) {
            MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
          } else {
            fputs(buf, outfile);
          }
        }

        if (do_md5) {
          update_image_md5(img, planes, &md5_ctx);
        } else {
          write_image_file(img, planes, outfile);
        }
      } else {
        /* Pattern output: one file (or one MD5 line) per frame. */
        generate_filename(outfile_pattern, outfile_name, PATH_MAX, img->d_w,
                          img->d_h, frame_in);
        if (do_md5) {
          MD5Init(&md5_ctx);
          update_image_md5(img, planes, &md5_ctx);
          MD5Final(md5_digest, &md5_ctx);
          print_md5(md5_digest, outfile_name);
        } else {
          outfile = open_outfile(outfile_name);
          write_image_file(img, planes, outfile);
          fclose(outfile);
        }
      }
    }

    if (stop_after && frame_in >= stop_after) break;
  }

  if (summary || progress) {
    show_progress(frame_in, frame_out, dx_time);
    fprintf(stderr, "\n");
  }

  if (frames_corrupted)
    fprintf(stderr, "WARNING: %d frames corrupted.\n", frames_corrupted);

fail:
  /* Common cleanup for both success and decode-error paths. */
  if (vpx_codec_destroy(&decoder)) {
    fprintf(stderr, "Failed to destroy decoder: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (!noblit && single_file) {
    if (do_md5) {
      MD5Final(md5_digest, &md5_ctx);
      print_md5(md5_digest, outfile_name);
    } else {
      fclose(outfile);
    }
  }

#if CONFIG_WEBM_IO
  if (input.vpx_input_ctx->file_type == FILE_TYPE_WEBM)
    webm_free(input.webm_ctx);
#endif
  /* NOTE(review): for WebM input, buf appears to be owned/freed by the webm
   * reader (webm_free above); only non-WebM paths free it here — confirm
   * against read_frame's ownership contract. */
  if (input.vpx_input_ctx->file_type != FILE_TYPE_WEBM) free(buf);

  if (scaled_img) vpx_img_free(scaled_img);

  for (i = 0; i < ext_fb_list.num_external_frame_buffers; ++i) {
    free(ext_fb_list.ext_fb[i].data);
  }
  free(ext_fb_list.ext_fb);
  fclose(infile);
  free(argv);

  return frames_corrupted ? EXIT_FAILURE : EXIT_SUCCESS;
}
/* Feed one compressed frame to the VP8 decoder and run the full
 * post-decode pipeline: GF usage-map update, reference-buffer swap/copy,
 * loop filtering, border extension, and timing bookkeeping.
 *
 * ptr        - decoder instance handle (VP8D_PTR); validated for NULL.
 * size       - size in bytes of the compressed data at `source`.
 * source     - compressed frame data (not copied; must stay valid during call).
 * time_stamp - presentation timestamp recorded as pbi->last_time_stamp.
 *
 * Returns 0 on success, a negative vp8_decode_frame() code on decode failure,
 * or -1 on a NULL handle or an internal longjmp error.
 *
 * Fix vs. original: the NULL-handle check now happens BEFORE deriving
 * pbi/cm from `ptr`; the original computed `&pbi->common` through a
 * potentially-null pointer first, which is undefined behavior.
 */
int vp8dx_receive_compressed_data(VP8D_PTR ptr, unsigned long size,
                                  const unsigned char *source,
                                  INT64 time_stamp)
{
    VP8D_COMP *pbi;
    VP8_COMMON *cm;
    int retcode = 0;
    struct vpx_usec_timer timer;

    /* Validate the handle before touching anything derived from it. */
    if (ptr == 0)
    {
        return -1;
    }

    pbi = (VP8D_COMP *) ptr;
    cm = &pbi->common;

    pbi->common.error.error_code = VPX_CODEC_OK;

    /* Error-recovery trampoline: internal decode errors longjmp back here. */
    if (setjmp(pbi->common.error.jmp))
    {
        pbi->common.error.setjmp = 0;
        return -1;
    }

    pbi->common.error.setjmp = 1;

#if HAVE_ARMV7
    /* NOTE(review): dx_store_reg is not declared in this function; presumed
     * file-scope NEON register-save area — confirm it is defined in this TU. */
    vp8_push_neon(dx_store_reg);
#endif

    vpx_usec_timer_start(&timer);

    pbi->Source = source;
    pbi->source_sz = size;

    retcode = vp8_decode_frame(pbi);

    if (retcode < 0)
    {
#if HAVE_ARMV7
        vp8_pop_neon(dx_store_reg);
#endif
        pbi->common.error.error_code = VPX_CODEC_ERROR;
        pbi->common.error.setjmp = 0;
        return retcode;
    }

    /* Update the GF useage maps. */
    vp8_update_gf_useage_maps(cm, &pbi->mb);

    /* If a threaded loop filter was started for this frame, wait for it. */
    if (pbi->b_multithreaded_lf && pbi->common.filter_level != 0)
        vp8_stop_lfthread(pbi);

    /* Select the buffer to display: swapping into last_frame also performs
     * the last-reference refresh. */
    if (cm->refresh_last_frame)
    {
        vp8_swap_yv12_buffer(&cm->last_frame, &cm->new_frame);
        cm->frame_to_show = &cm->last_frame;
    }
    else
    {
        cm->frame_to_show = &cm->new_frame;
    }

    if (!pbi->b_multithreaded_lf)
    {
        struct vpx_usec_timer lpftimer;
        vpx_usec_timer_start(&lpftimer);

        /* Apply the loop filter if appropriate. */
        if (cm->filter_level > 0)
        {
            vp8_loop_filter_frame(cm, &pbi->mb, cm->filter_level);
            cm->last_frame_type = cm->frame_type;
            cm->last_filter_type = cm->filter_type;
            cm->last_sharpness_level = cm->sharpness_level;
        }

        vpx_usec_timer_mark(&lpftimer);
        pbi->time_loop_filtering += vpx_usec_timer_elapsed(&lpftimer);
    }

    vp8_yv12_extend_frame_borders_ptr(cm->frame_to_show);

    /* If any buffer copy / swapping is signalled it should be done here. */
    if (cm->copy_buffer_to_arf)
    {
        if (cm->copy_buffer_to_arf == 1)
        {
            /* last_frame was already swapped with new_frame above when
             * refresh_last_frame is set, hence the source choice. */
            if (cm->refresh_last_frame)
                vp8_yv12_copy_frame_ptr(&cm->new_frame, &cm->alt_ref_frame);
            else
                vp8_yv12_copy_frame_ptr(&cm->last_frame, &cm->alt_ref_frame);
        }
        else if (cm->copy_buffer_to_arf == 2)
            vp8_yv12_copy_frame_ptr(&cm->golden_frame, &cm->alt_ref_frame);
    }

    if (cm->copy_buffer_to_gf)
    {
        if (cm->copy_buffer_to_gf == 1)
        {
            if (cm->refresh_last_frame)
                vp8_yv12_copy_frame_ptr(&cm->new_frame, &cm->golden_frame);
            else
                vp8_yv12_copy_frame_ptr(&cm->last_frame, &cm->golden_frame);
        }
        else if (cm->copy_buffer_to_gf == 2)
            vp8_yv12_copy_frame_ptr(&cm->alt_ref_frame, &cm->golden_frame);
    }

    /* Should the golden or alternate reference frame be refreshed? */
    if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame)
    {
        if (cm->refresh_golden_frame)
            vp8_yv12_copy_frame_ptr(cm->frame_to_show, &cm->golden_frame);

        if (cm->refresh_alt_ref_frame)
            vp8_yv12_copy_frame_ptr(cm->frame_to_show, &cm->alt_ref_frame);

        /* Update data structures that monitor GF useage. */
        vpx_memset(cm->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
        cm->gf_active_count = cm->mb_rows * cm->mb_cols;
    }

    vp8_clear_system_state();

    vpx_usec_timer_mark(&timer);
    pbi->decode_microseconds = vpx_usec_timer_elapsed(&timer);
    pbi->time_decoding += pbi->decode_microseconds;

    if (cm->show_frame)
        cm->current_video_frame++;

    pbi->ready_for_new_data = 0;
    pbi->last_time_stamp = time_stamp;

#if HAVE_ARMV7
    vp8_pop_neon(dx_store_reg);
#endif

    pbi->common.error.setjmp = 0;
    return retcode;
}
int main(int argc, char **argv) { VpxVideoWriter *outfile[VPX_TS_MAX_LAYERS] = {NULL}; vpx_codec_ctx_t codec; vpx_codec_enc_cfg_t cfg; int frame_cnt = 0; vpx_image_t raw; vpx_codec_err_t res; unsigned int width; unsigned int height; int speed; int frame_avail; int got_data; int flags = 0; unsigned int i; int pts = 0; // PTS starts at 0. int frame_duration = 1; // 1 timebase tick per frame. int layering_mode = 0; int layer_flags[VPX_TS_MAX_PERIODICITY] = {0}; int flag_periodicity = 1; vpx_svc_layer_id_t layer_id = {0, 0}; const VpxInterface *encoder = NULL; FILE *infile = NULL; struct RateControlMetrics rc; int64_t cx_time = 0; const int min_args_base = 11; #if CONFIG_VP9_HIGHBITDEPTH vpx_bit_depth_t bit_depth = VPX_BITS_8; int input_bit_depth = 8; const int min_args = min_args_base + 1; #else const int min_args = min_args_base; #endif // CONFIG_VP9_HIGHBITDEPTH double sum_bitrate = 0.0; double sum_bitrate2 = 0.0; double framerate = 30.0; exec_name = argv[0]; // Check usage and arguments. if (argc < min_args) { #if CONFIG_VP9_HIGHBITDEPTH die("Usage: %s <infile> <outfile> <codec_type(vp8/vp9)> <width> <height> " "<rate_num> <rate_den> <speed> <frame_drop_threshold> <mode> " "<Rate_0> ... <Rate_nlayers-1> <bit-depth> \n", argv[0]); #else die("Usage: %s <infile> <outfile> <codec_type(vp8/vp9)> <width> <height> " "<rate_num> <rate_den> <speed> <frame_drop_threshold> <mode> " "<Rate_0> ... 
<Rate_nlayers-1> \n", argv[0]); #endif // CONFIG_VP9_HIGHBITDEPTH } encoder = get_vpx_encoder_by_name(argv[3]); if (!encoder) die("Unsupported codec."); printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface())); width = strtol(argv[4], NULL, 0); height = strtol(argv[5], NULL, 0); if (width < 16 || width % 2 || height < 16 || height % 2) { die("Invalid resolution: %d x %d", width, height); } layering_mode = strtol(argv[10], NULL, 0); if (layering_mode < 0 || layering_mode > 12) { die("Invalid layering mode (0..12) %s", argv[10]); } if (argc != min_args + mode_to_num_layers[layering_mode]) { die("Invalid number of arguments"); } #if CONFIG_VP9_HIGHBITDEPTH switch (strtol(argv[argc-1], NULL, 0)) { case 8: bit_depth = VPX_BITS_8; input_bit_depth = 8; break; case 10: bit_depth = VPX_BITS_10; input_bit_depth = 10; break; case 12: bit_depth = VPX_BITS_12; input_bit_depth = 12; break; default: die("Invalid bit depth (8, 10, 12) %s", argv[argc-1]); } if (!vpx_img_alloc(&raw, bit_depth == VPX_BITS_8 ? VPX_IMG_FMT_I420 : VPX_IMG_FMT_I42016, width, height, 32)) { die("Failed to allocate image", width, height); } #else if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, width, height, 32)) { die("Failed to allocate image", width, height); } #endif // CONFIG_VP9_HIGHBITDEPTH // Populate encoder configuration. res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0); if (res) { printf("Failed to get config: %s\n", vpx_codec_err_to_string(res)); return EXIT_FAILURE; } // Update the default configuration with our settings. cfg.g_w = width; cfg.g_h = height; #if CONFIG_VP9_HIGHBITDEPTH if (bit_depth != VPX_BITS_8) { cfg.g_bit_depth = bit_depth; cfg.g_input_bit_depth = input_bit_depth; cfg.g_profile = 2; } #endif // CONFIG_VP9_HIGHBITDEPTH // Timebase format e.g. 30fps: numerator=1, demoninator = 30. 
cfg.g_timebase.num = strtol(argv[6], NULL, 0); cfg.g_timebase.den = strtol(argv[7], NULL, 0); speed = strtol(argv[8], NULL, 0); if (speed < 0) { die("Invalid speed setting: must be positive"); } for (i = min_args_base; (int)i < min_args_base + mode_to_num_layers[layering_mode]; ++i) { cfg.ts_target_bitrate[i - 11] = strtol(argv[i], NULL, 0); } // Real time parameters. cfg.rc_dropframe_thresh = strtol(argv[9], NULL, 0); cfg.rc_end_usage = VPX_CBR; cfg.rc_resize_allowed = 0; cfg.rc_min_quantizer = 2; cfg.rc_max_quantizer = 56; cfg.rc_undershoot_pct = 50; cfg.rc_overshoot_pct = 50; cfg.rc_buf_initial_sz = 500; cfg.rc_buf_optimal_sz = 600; cfg.rc_buf_sz = 1000; // Enable error resilient mode. cfg.g_error_resilient = 1; cfg.g_lag_in_frames = 0; cfg.kf_mode = VPX_KF_AUTO; // Disable automatic keyframe placement. cfg.kf_min_dist = cfg.kf_max_dist = 3000; set_temporal_layer_pattern(layering_mode, &cfg, layer_flags, &flag_periodicity); set_rate_control_metrics(&rc, &cfg); // Target bandwidth for the whole stream. // Set to ts_target_bitrate for highest layer (total bitrate). cfg.rc_target_bitrate = cfg.ts_target_bitrate[cfg.ts_number_layers - 1]; // Open input file. if (!(infile = fopen(argv[1], "rb"))) { die("Failed to open %s for reading", argv[1]); } framerate = cfg.g_timebase.den / cfg.g_timebase.num; // Open an output file for each stream. for (i = 0; i < cfg.ts_number_layers; ++i) { char file_name[PATH_MAX]; VpxVideoInfo info; info.codec_fourcc = encoder->fourcc; info.frame_width = cfg.g_w; info.frame_height = cfg.g_h; info.time_base.numerator = cfg.g_timebase.num; info.time_base.denominator = cfg.g_timebase.den; snprintf(file_name, sizeof(file_name), "%s_%d.ivf", argv[2], i); outfile[i] = vpx_video_writer_open(file_name, kContainerIVF, &info); if (!outfile[i]) die("Failed to open %s for writing", file_name); assert(outfile[i] != NULL); } // No spatial layers in this encoder. cfg.ss_number_layers = 1; // Initialize codec. 
#if CONFIG_VP9_HIGHBITDEPTH if (vpx_codec_enc_init( &codec, encoder->codec_interface(), &cfg, bit_depth == VPX_BITS_8 ? 0 : VPX_CODEC_USE_HIGHBITDEPTH)) #else if (vpx_codec_enc_init(&codec, encoder->codec_interface(), &cfg, 0)) #endif // CONFIG_VP9_HIGHBITDEPTH die_codec(&codec, "Failed to initialize encoder"); if (strncmp(encoder->name, "vp8", 3) == 0) { vpx_codec_control(&codec, VP8E_SET_CPUUSED, -speed); vpx_codec_control(&codec, VP8E_SET_NOISE_SENSITIVITY, kDenoiserOnYOnly); } else if (strncmp(encoder->name, "vp9", 3) == 0) { vpx_codec_control(&codec, VP8E_SET_CPUUSED, speed); vpx_codec_control(&codec, VP9E_SET_AQ_MODE, 3); vpx_codec_control(&codec, VP9E_SET_FRAME_PERIODIC_BOOST, 0); vpx_codec_control(&codec, VP9E_SET_NOISE_SENSITIVITY, 0); if (vpx_codec_control(&codec, VP9E_SET_SVC, layering_mode > 0 ? 1: 0)) { die_codec(&codec, "Failed to set SVC"); } } vpx_codec_control(&codec, VP8E_SET_STATIC_THRESHOLD, 1); vpx_codec_control(&codec, VP8E_SET_TOKEN_PARTITIONS, 1); // This controls the maximum target size of the key frame. // For generating smaller key frames, use a smaller max_intra_size_pct // value, like 100 or 200. { const int max_intra_size_pct = 900; vpx_codec_control(&codec, VP8E_SET_MAX_INTRA_BITRATE_PCT, max_intra_size_pct); } frame_avail = 1; while (frame_avail || got_data) { struct vpx_usec_timer timer; vpx_codec_iter_t iter = NULL; const vpx_codec_cx_pkt_t *pkt; // Update the temporal layer_id. No spatial layers in this test. layer_id.spatial_layer_id = 0; layer_id.temporal_layer_id = cfg.ts_layer_id[frame_cnt % cfg.ts_periodicity]; if (strncmp(encoder->name, "vp9", 3) == 0) { vpx_codec_control(&codec, VP9E_SET_SVC_LAYER_ID, &layer_id); } flags = layer_flags[frame_cnt % flag_periodicity]; frame_avail = vpx_img_read(&raw, infile); if (frame_avail) ++rc.layer_input_frames[layer_id.temporal_layer_id]; vpx_usec_timer_start(&timer); if (vpx_codec_encode(&codec, frame_avail? 
&raw : NULL, pts, 1, flags, VPX_DL_REALTIME)) { die_codec(&codec, "Failed to encode frame"); } vpx_usec_timer_mark(&timer); cx_time += vpx_usec_timer_elapsed(&timer); // Reset KF flag. if (layering_mode != 7) { layer_flags[0] &= ~VPX_EFLAG_FORCE_KF; } got_data = 0; while ( (pkt = vpx_codec_get_cx_data(&codec, &iter)) ) { got_data = 1; switch (pkt->kind) { case VPX_CODEC_CX_FRAME_PKT: for (i = cfg.ts_layer_id[frame_cnt % cfg.ts_periodicity]; i < cfg.ts_number_layers; ++i) { vpx_video_writer_write_frame(outfile[i], pkt->data.frame.buf, pkt->data.frame.sz, pts); ++rc.layer_tot_enc_frames[i]; rc.layer_encoding_bitrate[i] += 8.0 * pkt->data.frame.sz; // Keep count of rate control stats per layer (for non-key frames). if (i == cfg.ts_layer_id[frame_cnt % cfg.ts_periodicity] && !(pkt->data.frame.flags & VPX_FRAME_IS_KEY)) { rc.layer_avg_frame_size[i] += 8.0 * pkt->data.frame.sz; rc.layer_avg_rate_mismatch[i] += fabs(8.0 * pkt->data.frame.sz - rc.layer_pfb[i]) / rc.layer_pfb[i]; ++rc.layer_enc_frames[i]; } } // Update for short-time encoding bitrate states, for moving window // of size rc->window, shifted by rc->window / 2. // Ignore first window segment, due to key frame. if (frame_cnt > rc.window_size) { sum_bitrate += 0.001 * 8.0 * pkt->data.frame.sz * framerate; if (frame_cnt % rc.window_size == 0) { rc.window_count += 1; rc.avg_st_encoding_bitrate += sum_bitrate / rc.window_size; rc.variance_st_encoding_bitrate += (sum_bitrate / rc.window_size) * (sum_bitrate / rc.window_size); sum_bitrate = 0.0; } } // Second shifted window. 
if (frame_cnt > rc.window_size + rc.window_size / 2) { sum_bitrate2 += 0.001 * 8.0 * pkt->data.frame.sz * framerate; if (frame_cnt > 2 * rc.window_size && frame_cnt % rc.window_size == 0) { rc.window_count += 1; rc.avg_st_encoding_bitrate += sum_bitrate2 / rc.window_size; rc.variance_st_encoding_bitrate += (sum_bitrate2 / rc.window_size) * (sum_bitrate2 / rc.window_size); sum_bitrate2 = 0.0; } } break; default: break; } } ++frame_cnt; pts += frame_duration; } fclose(infile); printout_rate_control_summary(&rc, &cfg, frame_cnt); printf("\n"); printf("Frame cnt and encoding time/FPS stats for encoding: %d %f %f \n", frame_cnt, 1000 * (float)cx_time / (double)(frame_cnt * 1000000), 1000000 * (double)frame_cnt / (double)cx_time); if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec"); // Try to rewrite the output file headers with the actual frame count. for (i = 0; i < cfg.ts_number_layers; ++i) vpx_video_writer_close(outfile[i]); vpx_img_free(&raw); return EXIT_SUCCESS; }
/* Feed one compressed frame to the VP8 decoder and run the full post-decode
 * pipeline: frame-buffer acquisition, decode, reference-buffer swap,
 * loop filtering, border extension, and timing bookkeeping.
 *
 * ptr        - decoder instance handle (VP8D_PTR); NULL returns -1.
 * size       - compressed data size in bytes; 0 signals a missing frame
 *              (the last reference is marked corrupted, nothing decoded).
 * source     - compressed frame data (referenced, not copied).
 * time_stamp - presentation timestamp stored as pbi->last_time_stamp.
 *
 * Returns 0 on success (including the size==0 "missing frame" path), a
 * negative vp8_decode_frame() code on decode failure, or -1 on NULL handle
 * or an internal longjmp error.
 */
int vp8dx_receive_compressed_data(VP8D_PTR ptr, unsigned long size,
                                  const unsigned char *source,
                                  INT64 time_stamp)
{
#if HAVE_ARMV7
    /* Callee-saved NEON register spill area for vp8_push_neon/vp8_pop_neon. */
    INT64 dx_store_reg[8];
#endif
    VP8D_COMP *pbi = (VP8D_COMP *) ptr;
    VP8_COMMON *cm = &pbi->common;
    int retcode = 0;
    struct vpx_usec_timer timer;

    /*if(pbi->ready_for_new_data == 0)
        return -1;*/

    if (ptr == 0)
    {
        return -1;
    }

    pbi->common.error.error_code = VPX_CODEC_OK;

    if (size == 0)
    {
        /* This is used to signal that we are missing frames.
         * We do not know if the missing frame(s) was supposed to update
         * any of the reference buffers, but we act conservative and
         * mark only the last buffer as corrupted.
         */
        cm->yv12_fb[cm->lst_fb_idx].corrupted = 1;

        /* Signal that we have no frame to show. */
        cm->show_frame = 0;

        /* Nothing more to do. */
        return 0;
    }

#if HAVE_ARMV7
#if CONFIG_RUNTIME_CPU_DETECT
    if (cm->rtcd.flags & HAS_NEON)
#endif
    {
        vp8_push_neon(dx_store_reg);
    }
#endif

    /* Acquire a free frame buffer to decode into; its refcount is released
     * again on every error path below. */
    cm->new_fb_idx = get_free_fb (cm);

    /* Error-recovery trampoline: internal decode errors longjmp back here. */
    if (setjmp(pbi->common.error.jmp))
    {
#if HAVE_ARMV7
#if CONFIG_RUNTIME_CPU_DETECT
        if (cm->rtcd.flags & HAS_NEON)
#endif
        {
            vp8_pop_neon(dx_store_reg);
        }
#endif
        pbi->common.error.setjmp = 0;

        /* We do not know if the missing frame(s) was supposed to update
         * any of the reference buffers, but we act conservative and
         * mark only the last buffer as corrupted.
         */
        cm->yv12_fb[cm->lst_fb_idx].corrupted = 1;

        /* Release the frame buffer acquired above. */
        if (cm->fb_idx_ref_cnt[cm->new_fb_idx] > 0)
            cm->fb_idx_ref_cnt[cm->new_fb_idx]--;
        return -1;
    }

    pbi->common.error.setjmp = 1;

    vpx_usec_timer_start(&timer);

    /*cm->current_video_frame++;*/
    pbi->Source = source;
    pbi->source_sz = size;

    retcode = vp8_decode_frame(pbi);

    if (retcode < 0)
    {
#if HAVE_ARMV7
#if CONFIG_RUNTIME_CPU_DETECT
        if (cm->rtcd.flags & HAS_NEON)
#endif
        {
            vp8_pop_neon(dx_store_reg);
        }
#endif
        pbi->common.error.error_code = VPX_CODEC_ERROR;
        pbi->common.error.setjmp = 0;
        /* Release the frame buffer acquired above. */
        if (cm->fb_idx_ref_cnt[cm->new_fb_idx] > 0)
            cm->fb_idx_ref_cnt[cm->new_fb_idx]--;
        return retcode;
    }

#if CONFIG_MULTITHREAD
    /* Multithreaded path: the worker threads have already loop-filtered,
     * so only the buffer swap remains. */
    if (pbi->b_multithreaded_rd && cm->multi_token_partition != ONE_PARTITION)
    {
        if (swap_frame_buffers (cm))
        {
#if HAVE_ARMV7
#if CONFIG_RUNTIME_CPU_DETECT
            if (cm->rtcd.flags & HAS_NEON)
#endif
            {
                vp8_pop_neon(dx_store_reg);
            }
#endif
            pbi->common.error.error_code = VPX_CODEC_ERROR;
            pbi->common.error.setjmp = 0;
            return -1;
        }
    } else
#endif
    {
        /* Single-threaded path: swap buffers, then loop-filter in-line. */
        if (swap_frame_buffers (cm))
        {
#if HAVE_ARMV7
#if CONFIG_RUNTIME_CPU_DETECT
            if (cm->rtcd.flags & HAS_NEON)
#endif
            {
                vp8_pop_neon(dx_store_reg);
            }
#endif
            pbi->common.error.error_code = VPX_CODEC_ERROR;
            pbi->common.error.setjmp = 0;
            return -1;
        }

        if(pbi->common.filter_level)
        {
            struct vpx_usec_timer lpftimer;
            vpx_usec_timer_start(&lpftimer);
            /* Apply the loop filter if appropriate. */
            vp8_loop_filter_frame(cm, &pbi->mb, cm->filter_level);
            vpx_usec_timer_mark(&lpftimer);
            pbi->time_loop_filtering += vpx_usec_timer_elapsed(&lpftimer);
            cm->last_frame_type = cm->frame_type;
            cm->last_filter_type = cm->filter_type;
            cm->last_sharpness_level = cm->sharpness_level;
        }
        vp8_yv12_extend_frame_borders_ptr(cm->frame_to_show);
    }

    vp8_clear_system_state();

    vpx_usec_timer_mark(&timer);
    pbi->decode_microseconds = vpx_usec_timer_elapsed(&timer);
    pbi->time_decoding += pbi->decode_microseconds;

    /*vp8_print_modes_and_motion_vectors( cm->mi, cm->mb_rows,cm->mb_cols, cm->current_video_frame);*/

    if (cm->show_frame)
        cm->current_video_frame++;

    pbi->ready_for_new_data = 0;
    pbi->last_time_stamp = time_stamp;

#if 0
    /* Disabled debug code: rolling 16-frame bitrate/framerate estimate. */
    {
        int i;
        INT64 earliest_time = pbi->dr[0].time_stamp;
        INT64 latest_time = pbi->dr[0].time_stamp;
        INT64 time_diff = 0;
        int bytes = 0;

        pbi->dr[pbi->common.current_video_frame&0xf].size = pbi->bc.pos + pbi->bc2.pos + 4;;
        pbi->dr[pbi->common.current_video_frame&0xf].time_stamp = time_stamp;

        for (i = 0; i < 16; i++)
        {
            bytes += pbi->dr[i].size;

            if (pbi->dr[i].time_stamp < earliest_time)
                earliest_time = pbi->dr[i].time_stamp;

            if (pbi->dr[i].time_stamp > latest_time)
                latest_time = pbi->dr[i].time_stamp;
        }

        time_diff = latest_time - earliest_time;

        if (time_diff > 0)
        {
            pbi->common.bitrate = 80000.00 * bytes / time_diff ;
            pbi->common.framerate = 160000000.00 / time_diff ;
        }
    }
#endif

#if HAVE_ARMV7
#if CONFIG_RUNTIME_CPU_DETECT
    if (cm->rtcd.flags & HAS_NEON)
#endif
    {
        vp8_pop_neon(dx_store_reg);
    }
#endif

    pbi->common.error.setjmp = 0;
    return retcode;
}
int main(int argc, const char **argv_) { vpx_codec_ctx_t encoder; const char *in_fn = NULL, *out_fn = NULL, *stats_fn = NULL; int i; FILE *infile, *outfile; vpx_codec_enc_cfg_t cfg; vpx_codec_err_t res; int pass, one_pass_only = 0; stats_io_t stats; vpx_image_t raw; const struct codec_item *codec = codecs; int frame_avail, got_data; struct arg arg; char **argv, **argi, **argj; int arg_usage = 0, arg_passes = 1, arg_deadline = 0; int arg_ctrls[ARG_CTRL_CNT_MAX][2], arg_ctrl_cnt = 0; int arg_limit = 0; static const arg_def_t **ctrl_args = no_args; static const int *ctrl_args_map = NULL; int verbose = 0, show_psnr = 0; int arg_use_i420 = 1; int arg_have_timebase = 0; unsigned long cx_time = 0; unsigned int file_type, fourcc; y4m_input y4m; exec_name = argv_[0]; if (argc < 3) usage_exit(); /* First parse the codec and usage values, because we want to apply other * parameters on top of the default configuration provided by the codec. */ argv = argv_dup(argc - 1, argv_ + 1); for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) { arg.argv_step = 1; if (arg_match(&arg, &codecarg, argi)) { int j, k = -1; for (j = 0; j < sizeof(codecs) / sizeof(codecs[0]); j++) if (!strcmp(codecs[j].name, arg.val)) k = j; if (k >= 0) codec = codecs + k; else die("Error: Unrecognized argument (%s) to --codec\n", arg.val); } else if (arg_match(&arg, &passes, argi)) { arg_passes = arg_parse_uint(&arg); if (arg_passes < 1 || arg_passes > 2) die("Error: Invalid number of passes (%d)\n", arg_passes); } else if (arg_match(&arg, &pass_arg, argi)) { one_pass_only = arg_parse_uint(&arg); if (one_pass_only < 1 || one_pass_only > 2) die("Error: Invalid pass selected (%d)\n", one_pass_only); } else if (arg_match(&arg, &fpf_name, argi)) stats_fn = arg.val; else if (arg_match(&arg, &usage, argi)) arg_usage = arg_parse_uint(&arg); else if (arg_match(&arg, &deadline, argi)) arg_deadline = arg_parse_uint(&arg); else if (arg_match(&arg, &best_dl, argi)) arg_deadline = VPX_DL_BEST_QUALITY; else if 
(arg_match(&arg, &good_dl, argi)) arg_deadline = VPX_DL_GOOD_QUALITY; else if (arg_match(&arg, &rt_dl, argi)) arg_deadline = VPX_DL_REALTIME; else if (arg_match(&arg, &use_yv12, argi)) { arg_use_i420 = 0; } else if (arg_match(&arg, &use_i420, argi)) { arg_use_i420 = 1; } else if (arg_match(&arg, &verbosearg, argi)) verbose = 1; else if (arg_match(&arg, &limit, argi)) arg_limit = arg_parse_uint(&arg); else if (arg_match(&arg, &psnrarg, argi)) show_psnr = 1; else argj++; } /* Ensure that --passes and --pass are consistent. If --pass is set and --passes=2, * ensure --fpf was set. */ if (one_pass_only) { /* DWIM: Assume the user meant passes=2 if pass=2 is specified */ if (one_pass_only > arg_passes) { fprintf(stderr, "Warning: Assuming --pass=%d implies --passes=%d\n", one_pass_only, one_pass_only); arg_passes = one_pass_only; } if (arg_passes == 2 && !stats_fn) die("Must specify --fpf when --pass=%d and --passes=2\n", one_pass_only); } /* Populate encoder configuration */ res = vpx_codec_enc_config_default(codec->iface, &cfg, arg_usage); if (res) { fprintf(stderr, "Failed to get config: %s\n", vpx_codec_err_to_string(res)); return EXIT_FAILURE; } /* Now parse the remainder of the parameters. 
*/ for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) { arg.argv_step = 1; if (0); else if (arg_match(&arg, &threads, argi)) cfg.g_threads = arg_parse_uint(&arg); else if (arg_match(&arg, &profile, argi)) cfg.g_profile = arg_parse_uint(&arg); else if (arg_match(&arg, &width, argi)) cfg.g_w = arg_parse_uint(&arg); else if (arg_match(&arg, &height, argi)) cfg.g_h = arg_parse_uint(&arg); else if (arg_match(&arg, &timebase, argi)) { cfg.g_timebase = arg_parse_rational(&arg); arg_have_timebase = 1; } else if (arg_match(&arg, &error_resilient, argi)) cfg.g_error_resilient = arg_parse_uint(&arg); else if (arg_match(&arg, &lag_in_frames, argi)) cfg.g_lag_in_frames = arg_parse_uint(&arg); else if (arg_match(&arg, &dropframe_thresh, argi)) cfg.rc_dropframe_thresh = arg_parse_uint(&arg); else if (arg_match(&arg, &resize_allowed, argi)) cfg.rc_resize_allowed = arg_parse_uint(&arg); else if (arg_match(&arg, &resize_up_thresh, argi)) cfg.rc_resize_up_thresh = arg_parse_uint(&arg); else if (arg_match(&arg, &resize_down_thresh, argi)) cfg.rc_resize_down_thresh = arg_parse_uint(&arg); else if (arg_match(&arg, &resize_down_thresh, argi)) cfg.rc_resize_down_thresh = arg_parse_uint(&arg); else if (arg_match(&arg, &end_usage, argi)) cfg.rc_end_usage = arg_parse_uint(&arg); else if (arg_match(&arg, &target_bitrate, argi)) cfg.rc_target_bitrate = arg_parse_uint(&arg); else if (arg_match(&arg, &min_quantizer, argi)) cfg.rc_min_quantizer = arg_parse_uint(&arg); else if (arg_match(&arg, &max_quantizer, argi)) cfg.rc_max_quantizer = arg_parse_uint(&arg); else if (arg_match(&arg, &undershoot_pct, argi)) cfg.rc_undershoot_pct = arg_parse_uint(&arg); else if (arg_match(&arg, &overshoot_pct, argi)) cfg.rc_overshoot_pct = arg_parse_uint(&arg); else if (arg_match(&arg, &buf_sz, argi)) cfg.rc_buf_sz = arg_parse_uint(&arg); else if (arg_match(&arg, &buf_initial_sz, argi)) cfg.rc_buf_initial_sz = arg_parse_uint(&arg); else if (arg_match(&arg, &buf_optimal_sz, argi)) 
cfg.rc_buf_optimal_sz = arg_parse_uint(&arg); else if (arg_match(&arg, &bias_pct, argi)) { cfg.rc_2pass_vbr_bias_pct = arg_parse_uint(&arg); if (arg_passes < 2) fprintf(stderr, "Warning: option %s ignored in one-pass mode.\n", arg.name); } else if (arg_match(&arg, &minsection_pct, argi)) { cfg.rc_2pass_vbr_minsection_pct = arg_parse_uint(&arg); if (arg_passes < 2) fprintf(stderr, "Warning: option %s ignored in one-pass mode.\n", arg.name); } else if (arg_match(&arg, &maxsection_pct, argi)) { cfg.rc_2pass_vbr_maxsection_pct = arg_parse_uint(&arg); if (arg_passes < 2) fprintf(stderr, "Warning: option %s ignored in one-pass mode.\n", arg.name); } else if (arg_match(&arg, &kf_min_dist, argi)) cfg.kf_min_dist = arg_parse_uint(&arg); else if (arg_match(&arg, &kf_max_dist, argi)) cfg.kf_max_dist = arg_parse_uint(&arg); else if (arg_match(&arg, &kf_disabled, argi)) cfg.kf_mode = VPX_KF_DISABLED; else argj++; } /* Handle codec specific options */ #if CONFIG_VP8_ENCODER if (codec->iface == &vpx_codec_vp8_cx_algo) { ctrl_args = vp8_args; ctrl_args_map = vp8_arg_ctrl_map; } #endif for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) { int match = 0; arg.argv_step = 1; for (i = 0; ctrl_args[i]; i++) { if (arg_match(&arg, ctrl_args[i], argi)) { match = 1; if (arg_ctrl_cnt < ARG_CTRL_CNT_MAX) { arg_ctrls[arg_ctrl_cnt][0] = ctrl_args_map[i]; arg_ctrls[arg_ctrl_cnt][1] = arg_parse_int(&arg); arg_ctrl_cnt++; } } } if (!match) argj++; } /* Check for unrecognized options */ for (argi = argv; *argi; argi++) if (argi[0][0] == '-' && argi[0][1]) die("Error: Unrecognized option %s\n", *argi); /* Handle non-option arguments */ in_fn = argv[0]; out_fn = argv[1]; if (!in_fn || !out_fn) usage_exit(); memset(&stats, 0, sizeof(stats)); for (pass = one_pass_only ? 
one_pass_only - 1 : 0; pass < arg_passes; pass++) { int frames_in = 0, frames_out = 0; unsigned long nbytes = 0; struct detect_buffer detect; /* Parse certain options from the input file, if possible */ infile = strcmp(in_fn, "-") ? fopen(in_fn, "rb") : stdin; if (!infile) { fprintf(stderr, "Failed to open input file\n"); return EXIT_FAILURE; } fread(detect.buf, 1, 4, infile); detect.valid = 0; if (file_is_y4m(infile, &y4m, detect.buf)) { if (y4m_input_open(&y4m, infile, detect.buf, 4) >= 0) { file_type = FILE_TYPE_Y4M; cfg.g_w = y4m.pic_w; cfg.g_h = y4m.pic_h; /* Use the frame rate from the file only if none was specified * on the command-line. */ if (!arg_have_timebase) { cfg.g_timebase.num = y4m.fps_d; cfg.g_timebase.den = y4m.fps_n; } arg_use_i420 = 0; } else { fprintf(stderr, "Unsupported Y4M stream.\n"); return EXIT_FAILURE; } } else if (file_is_ivf(infile, &fourcc, &cfg.g_w, &cfg.g_h, detect.buf)) { file_type = FILE_TYPE_IVF; switch (fourcc) { case 0x32315659: arg_use_i420 = 0; break; case 0x30323449: arg_use_i420 = 1; break; default: fprintf(stderr, "Unsupported fourcc (%08x) in IVF\n", fourcc); return EXIT_FAILURE; } } else { file_type = FILE_TYPE_RAW; detect.valid = 1; } #define SHOW(field) fprintf(stderr, " %-28s = %d\n", #field, cfg.field) if (verbose && pass == 0) { fprintf(stderr, "Codec: %s\n", vpx_codec_iface_name(codec->iface)); fprintf(stderr, "Source file: %s Format: %s\n", in_fn, arg_use_i420 ? 
"I420" : "YV12"); fprintf(stderr, "Destination file: %s\n", out_fn); fprintf(stderr, "Encoder parameters:\n"); SHOW(g_usage); SHOW(g_threads); SHOW(g_profile); SHOW(g_w); SHOW(g_h); SHOW(g_timebase.num); SHOW(g_timebase.den); SHOW(g_error_resilient); SHOW(g_pass); SHOW(g_lag_in_frames); SHOW(rc_dropframe_thresh); SHOW(rc_resize_allowed); SHOW(rc_resize_up_thresh); SHOW(rc_resize_down_thresh); SHOW(rc_end_usage); SHOW(rc_target_bitrate); SHOW(rc_min_quantizer); SHOW(rc_max_quantizer); SHOW(rc_undershoot_pct); SHOW(rc_overshoot_pct); SHOW(rc_buf_sz); SHOW(rc_buf_initial_sz); SHOW(rc_buf_optimal_sz); SHOW(rc_2pass_vbr_bias_pct); SHOW(rc_2pass_vbr_minsection_pct); SHOW(rc_2pass_vbr_maxsection_pct); SHOW(kf_mode); SHOW(kf_min_dist); SHOW(kf_max_dist); } if(pass == (one_pass_only ? one_pass_only - 1 : 0)) { if (file_type == FILE_TYPE_Y4M) /*The Y4M reader does its own allocation. Just initialize this here to avoid problems if we never read any frames.*/ memset(&raw, 0, sizeof(raw)); else vpx_img_alloc(&raw, arg_use_i420 ? VPX_IMG_FMT_I420 : VPX_IMG_FMT_YV12, cfg.g_w, cfg.g_h, 1); // This was added so that ivfenc will create monotically increasing // timestamps. Since we create new timestamps for alt-reference frames // we need to make room in the series of timestamps. Since there can // only be 1 alt-ref frame ( current bitstream) multiplying by 2 // gives us enough room. cfg.g_timebase.den *= 2; } outfile = strcmp(out_fn, "-") ? fopen(out_fn, "wb") : stdout; if (!outfile) { fprintf(stderr, "Failed to open output file\n"); return EXIT_FAILURE; } if (stats_fn) { if (!stats_open_file(&stats, stats_fn, pass)) { fprintf(stderr, "Failed to open statistics store\n"); return EXIT_FAILURE; } } else { if (!stats_open_mem(&stats, pass)) { fprintf(stderr, "Failed to open statistics store\n"); return EXIT_FAILURE; } } cfg.g_pass = arg_passes == 2 ? pass ? 
VPX_RC_LAST_PASS : VPX_RC_FIRST_PASS : VPX_RC_ONE_PASS; #if VPX_ENCODER_ABI_VERSION > (1 + VPX_CODEC_ABI_VERSION) if (pass) { cfg.rc_twopass_stats_in = stats_get(&stats); } #endif write_ivf_file_header(outfile, &cfg, codec->fourcc, 0); /* Construct Encoder Context */ vpx_codec_enc_init(&encoder, codec->iface, &cfg, show_psnr ? VPX_CODEC_USE_PSNR : 0); ctx_exit_on_error(&encoder, "Failed to initialize encoder"); /* Note that we bypass the vpx_codec_control wrapper macro because * we're being clever to store the control IDs in an array. Real * applications will want to make use of the enumerations directly */ for (i = 0; i < arg_ctrl_cnt; i++) { if (vpx_codec_control_(&encoder, arg_ctrls[i][0], arg_ctrls[i][1])) fprintf(stderr, "Error: Tried to set control %d = %d\n", arg_ctrls[i][0], arg_ctrls[i][1]); ctx_exit_on_error(&encoder, "Failed to control codec"); } frame_avail = 1; got_data = 0; while (frame_avail || got_data) { vpx_codec_iter_t iter = NULL; const vpx_codec_cx_pkt_t *pkt; struct vpx_usec_timer timer; if (!arg_limit || frames_in < arg_limit) { frame_avail = read_frame(infile, &raw, file_type, &y4m, &detect); if (frame_avail) frames_in++; fprintf(stderr, "\rPass %d/%d frame %4d/%-4d %7ldB \033[K", pass + 1, arg_passes, frames_in, frames_out, nbytes); } else frame_avail = 0; vpx_usec_timer_start(&timer); // since we halved our timebase we need to double the timestamps // and duration we pass in. vpx_codec_encode(&encoder, frame_avail ? 
&raw : NULL, (frames_in - 1) * 2, 2, 0, arg_deadline); vpx_usec_timer_mark(&timer); cx_time += vpx_usec_timer_elapsed(&timer); ctx_exit_on_error(&encoder, "Failed to encode frame"); got_data = 0; while ((pkt = vpx_codec_get_cx_data(&encoder, &iter))) { got_data = 1; switch (pkt->kind) { case VPX_CODEC_CX_FRAME_PKT: frames_out++; fprintf(stderr, " %6luF", (unsigned long)pkt->data.frame.sz); write_ivf_frame_header(outfile, pkt); fwrite(pkt->data.frame.buf, 1, pkt->data.frame.sz, outfile); nbytes += pkt->data.raw.sz; break; case VPX_CODEC_STATS_PKT: frames_out++; fprintf(stderr, " %6luS", (unsigned long)pkt->data.twopass_stats.sz); stats_write(&stats, pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz); nbytes += pkt->data.raw.sz; break; case VPX_CODEC_PSNR_PKT: if (show_psnr) { int i; for (i = 0; i < 4; i++) fprintf(stderr, "%.3lf ", pkt->data.psnr.psnr[i]); } break; default: break; } } fflush(stdout); } /* this bitrate calc is simplified and relies on the fact that this * application uses 1/timebase for framerate. */ fprintf(stderr, "\rPass %d/%d frame %4d/%-4d %7ldB %7ldb/f %7"PRId64"b/s" " %7lu %s (%.2f fps)\033[K", pass + 1, arg_passes, frames_in, frames_out, nbytes, nbytes * 8 / frames_in, nbytes * 8 *(int64_t)cfg.g_timebase.den/2/ cfg.g_timebase.num / frames_in, cx_time > 9999999 ? cx_time / 1000 : cx_time, cx_time > 9999999 ? "ms" : "us", (float)frames_in * 1000000.0 / (float)cx_time); vpx_codec_destroy(&encoder); fclose(infile); if (!fseek(outfile, 0, SEEK_SET)) write_ivf_file_header(outfile, &cfg, codec->fourcc, frames_out); fclose(outfile); stats_close(&stats); fprintf(stderr, "\n"); if (one_pass_only) break; } vpx_img_free(&raw); free(argv); return EXIT_SUCCESS; }
static vpx_codec_err_t decode_one_recon_ex(vpx_codec_alg_priv_t *ctx, const uint8_t **data, unsigned int data_sz, void *user_priv, int64_t deadline, void *texture) { vpx_codec_err_t res = VPX_CODEC_OK; VP9D_COMP *pbi; VP9D_COMP *pbi_storage; VP9D_COMP *my_pbi; static int flag = 0; int i_is_last_frame = 0; int ret = -1; struct vpx_usec_timer timer; unsigned long yuv2rgb_time = 0; unsigned long decode_time = 0; // ctx->img_avail = 0; vpx_usec_timer_start(&timer); if (data_sz == 0) { pbi = (VP9D_COMP *)ctx->pbi; if (!pbi->l_bufpool_flag_output) { return 0; } } /* Determine the stream parameters. Note that we rely on peek_si to * validate that we have a buffer that does not wrap around the top * of the heap. */ if (!ctx->si.h) res = ctx->base.iface->dec.peek_si(*data, data_sz, &ctx->si); /* Perform deferred allocations, if required */ if (!res && ctx->defer_alloc) { int i; for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++) { vpx_codec_dec_cfg_t cfg; cfg.w = ctx->si.w; cfg.h = ctx->si.h; ctx->mmaps[i].id = vp9_mem_req_segs[i].id; ctx->mmaps[i].sz = vp9_mem_req_segs[i].sz; ctx->mmaps[i].align = vp9_mem_req_segs[i].align; ctx->mmaps[i].flags = vp9_mem_req_segs[i].flags; if (!ctx->mmaps[i].sz) ctx->mmaps[i].sz = vp9_mem_req_segs[i].calc_sz(&cfg, ctx->base.init_flags); res = vpx_mmap_alloc(&ctx->mmaps[i]); } if (!res) vp9_finalize_mmaps(ctx); ctx->defer_alloc = 0; } /* Initialize the decoder instance on the first frame*/ if (!res && !ctx->decoder_init) { res = vpx_validate_mmaps(&ctx->si, ctx->mmaps, vp9_mem_req_segs, NELEMENTS(vp9_mem_req_segs), ctx->base.init_flags); if (!res) { VP9D_CONFIG oxcf; VP9D_PTR optr; VP9D_COMP *const new_pbi = vpx_memalign(32, sizeof(VP9D_COMP)); VP9D_COMP *const new_pbi_two = vpx_memalign(32, sizeof(VP9D_COMP)); vp9_initialize_dec(); oxcf.width = ctx->si.w; oxcf.height = ctx->si.h; oxcf.version = 9; oxcf.postprocess = 0; oxcf.max_threads = ctx->cfg.threads; oxcf.inv_tile_order = ctx->invert_tile_order; optr = 
vp9_create_decompressor_recon(&oxcf); vp9_zero(*new_pbi); vp9_zero(*new_pbi_two); // If postprocessing was enabled by the application and a // configuration has not been provided, default it. if (!ctx->postproc_cfg_set && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)) { ctx->postproc_cfg.post_proc_flag = VP8_DEBLOCK | VP8_DEMACROBLOCK; ctx->postproc_cfg.deblocking_level = 4; ctx->postproc_cfg.noise_level = 0; } if (!optr) { res = VPX_CODEC_ERROR; } else { VP9D_COMP *const pbi = (VP9D_COMP*)optr; VP9_COMMON *const cm = &pbi->common; VP9_COMMON *const cm0 = &new_pbi->common; VP9_COMMON *const cm1 = &new_pbi_two->common; if (ctx->fb_list != NULL && ctx->realloc_fb_cb != NULL && ctx->fb_count > 0) { cm->fb_list = ctx->fb_list; cm->fb_count = ctx->fb_count; cm->realloc_fb_cb = ctx->realloc_fb_cb; cm->user_priv = ctx->user_priv; CpuFlag = 1; } else { CpuFlag = 0; cm->fb_count = FRAME_BUFFERS; } cm->fb_lru = ctx->fb_lru; CHECK_MEM_ERROR(cm, cm->yv12_fb, vpx_calloc(cm->fb_count, sizeof(*cm->yv12_fb))); CHECK_MEM_ERROR(cm, cm->fb_idx_ref_cnt, vpx_calloc(cm->fb_count, sizeof(*cm->fb_idx_ref_cnt))); if (cm->fb_lru) { CHECK_MEM_ERROR(cm, cm->fb_idx_ref_lru, vpx_calloc(cm->fb_count, sizeof(*cm->fb_idx_ref_lru))); } ctx->pbi = optr; ctx->storage_pbi[0] = new_pbi; ctx->storage_pbi[1] = new_pbi_two; // cm 0 if (ctx->fb_list != NULL && ctx->realloc_fb_cb != NULL && ctx->fb_count > 0) { cm0->fb_list = ctx->fb_list; cm0->fb_count = ctx->fb_count; cm0->realloc_fb_cb = ctx->realloc_fb_cb; cm0->user_priv = ctx->user_priv; } else { cm0->fb_count = FRAME_BUFFERS; } cm0->fb_lru = ctx->fb_lru; // CHECK_MEM_ERROR(cm, cm->yv12_fb, // vpx_calloc(cm->fb_count, sizeof(*cm->yv12_fb))); CHECK_MEM_ERROR(cm0, cm0->fb_idx_ref_cnt, vpx_calloc(cm0->fb_count, sizeof(*cm0->fb_idx_ref_cnt))); if (cm0->fb_lru) { CHECK_MEM_ERROR(cm0, cm0->fb_idx_ref_lru, vpx_calloc(cm0->fb_count, sizeof(*cm0->fb_idx_ref_lru))); } // cm 1 if (ctx->fb_list != NULL && ctx->realloc_fb_cb != NULL && ctx->fb_count > 0) { 
cm1->fb_list = ctx->fb_list; cm1->fb_count = ctx->fb_count; cm1->realloc_fb_cb = ctx->realloc_fb_cb; cm1->user_priv = ctx->user_priv; } else { cm1->fb_count = FRAME_BUFFERS; } cm1->fb_lru = ctx->fb_lru; // CHECK_MEM_ERROR(cm, cm->yv12_fb, // vpx_calloc(cm->fb_count, sizeof(*cm->yv12_fb))); CHECK_MEM_ERROR(cm1, cm1->fb_idx_ref_cnt, vpx_calloc(cm1->fb_count, sizeof(*cm1->fb_idx_ref_cnt))); if (cm1->fb_lru) { CHECK_MEM_ERROR(cm1, cm1->fb_idx_ref_lru, vpx_calloc(cm1->fb_count, sizeof(*cm1->fb_idx_ref_lru))); } } } ctx->decoder_init = 1; } if (!res && ctx->pbi) { YV12_BUFFER_CONFIG sd; int64_t time_stamp = 0, time_end_stamp = 0; vp9_ppflags_t flags = {0}; if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC) { flags.post_proc_flag = #if CONFIG_POSTPROC_VISUALIZER (ctx->dbg_color_ref_frame_flag ? VP9D_DEBUG_CLR_FRM_REF_BLKS : 0) | (ctx->dbg_color_mb_modes_flag ? VP9D_DEBUG_CLR_BLK_MODES : 0) | (ctx->dbg_color_b_modes_flag ? VP9D_DEBUG_CLR_BLK_MODES : 0) | (ctx->dbg_display_mv_flag ? VP9D_DEBUG_DRAW_MV : 0) | #endif ctx->postproc_cfg.post_proc_flag; flags.deblocking_level = ctx->postproc_cfg.deblocking_level; flags.noise_level = ctx->postproc_cfg.noise_level; #if CONFIG_POSTPROC_VISUALIZER flags.display_ref_frame_flag = ctx->dbg_color_ref_frame_flag; flags.display_mb_modes_flag = ctx->dbg_color_mb_modes_flag; flags.display_b_modes_flag = ctx->dbg_color_b_modes_flag; flags.display_mv_flag = ctx->dbg_display_mv_flag; #endif } #if 0 if (vp9_receive_compressed_data(ctx->pbi, data_sz, data, deadline)) { VP9D_COMP *pbi = (VP9D_COMP*)ctx->pbi; res = update_error_state(ctx, &pbi->common.error); } if (!res && 0 == vp9_get_raw_frame(ctx->pbi, &sd, &time_stamp, &time_end_stamp, &flags)) { yuvconfig2image(&ctx->img, &sd, user_priv); ctx->img_avail = 1; } #endif if (data_sz == 0) { i_is_last_frame = 1; } if (vp9_receive_compressed_data_recon(ctx->pbi, ctx->storage_pbi, data_sz, data, deadline, i_is_last_frame)) { pbi = (VP9D_COMP *)ctx->pbi; if (pbi->l_bufpool_flag_output == 0) 
pbi_storage = (VP9D_COMP *)ctx->storage_pbi[1]; else pbi_storage = (VP9D_COMP *)ctx->storage_pbi[pbi->l_bufpool_flag_output & 1]; res = update_error_state(ctx, &pbi_storage->common.error); } vpx_usec_timer_mark(&timer); decode_time = (unsigned int)vpx_usec_timer_elapsed(&timer); if (ctx->pbi) { pbi = (VP9D_COMP *)ctx->pbi; if (pbi->l_bufpool_flag_output) { ret = vp9_get_raw_frame(ctx->storage_pbi[pbi->l_bufpool_flag_output & 1], &sd, &time_stamp, &time_end_stamp, &flags); if (!pbi->res && 0 == ret ) { //for render my_pbi = (VP9D_COMP *)(ctx->storage_pbi[pbi->l_bufpool_flag_output & 1]); yuv2rgba_ocl_obj.y_plane_offset = my_pbi->common.frame_to_show->y_buffer - inter_ocl_obj.buffer_pool_map_ptr; yuv2rgba_ocl_obj.u_plane_offset = my_pbi->common.frame_to_show->u_buffer - inter_ocl_obj.buffer_pool_map_ptr; yuv2rgba_ocl_obj.v_plane_offset = my_pbi->common.frame_to_show->v_buffer - inter_ocl_obj.buffer_pool_map_ptr; yuv2rgba_ocl_obj.Y_stride = my_pbi->common.frame_to_show->y_stride; yuv2rgba_ocl_obj.UV_stride = my_pbi->common.frame_to_show->uv_stride; yuv2rgba_ocl_obj.globalThreads[0] = my_pbi->common.width >> 1; yuv2rgba_ocl_obj.globalThreads[1] = my_pbi->common.height >> 1; vpx_usec_timer_start(&timer); vp9_yuv2rgba(&yuv2rgba_ocl_obj, texture); vpx_usec_timer_mark(&timer); yuv2rgb_time = (unsigned int)vpx_usec_timer_elapsed(&timer); fprintf(pLog, "decode one frame time(without YUV to RGB): %lu us\n" "the whole time of YUV to RGB: %lu us\n", decode_time, yuv2rgb_time); // for render end yuvconfig2image(&ctx->img, &sd, user_priv); ctx->img_avail = 1; }