HRESULT GetSampleFromMFStreamer(/* out */ const vpx_codec_cx_pkt_t *& vpkt)
{
    //printf("Get Sample...\n");

    IMFSample *videoSample = NULL;

    // Initial read results in a null pSample??
    CHECK_HR(videoReader->ReadSample(
        MF_SOURCE_READER_ANY_STREAM,    // Stream index.
        0,                              // Flags.
        &streamIndex,                   // Receives the actual stream index.
        &flags,                         // Receives status flags.
        &llVideoTimeStamp,              // Receives the time stamp.
        &videoSample                    // Receives the sample or NULL.
        ), L"Error reading video sample.");

    if (!videoSample)
    {
        printf("Failed to get video sample from MF.\n");
        return S_FALSE; // No sample available yet; without this the function fell off the end.
    }
    else
    {
        DWORD nCurrBufferCount = 0;
        CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n");

        IMFMediaBuffer *pMediaBuffer;
        CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n");

        DWORD nCurrLen = 0;
        CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n");

        byte *imgBuff;
        DWORD buffCurrLen = 0;
        DWORD buffMaxLen = 0;
        pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen);

        /*BYTE *i420 = new BYTE[4608000];
        YUY2ToI420(WIDTH, HEIGHT, STRIDE, imgBuff, i420);
        vpx_image_t* img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, i420);*/
        vpx_image_t *const img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, imgBuff);

        const vpx_codec_cx_pkt_t *pkt;
        vpx_enc_frame_flags_t flags = 0;

        if (vpx_codec_encode(&_vpxCodec, &_rawImage, _sampleCount, 1, flags, VPX_DL_REALTIME))
        {
            printf("VPX codec failed to encode the frame.\n");
            return -1;
        }
        else
        {
            vpx_codec_iter_t iter = NULL;

            while ((pkt = vpx_codec_get_cx_data(&_vpxCodec, &iter)))
            {
                switch (pkt->kind)
                {
                case VPX_CODEC_CX_FRAME_PKT:
                    vpkt = pkt; // const_cast<vpx_codec_cx_pkt_t **>(&pkt);
                    break;
                default:
                    break;
                }

                // %zu since pkt->data.frame.sz is a size_t (the original %i is wrong on 64-bit).
                printf("%s %zu\n", pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? "K" : ".", pkt->data.frame.sz);
            }
        }

        _sampleCount++;

        vpx_img_free(img);
        pMediaBuffer->Unlock();
        pMediaBuffer->Release();
        //delete i420;
        videoSample->Release();

        return S_OK;
    }
}
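This first example already shows the shape every snippet on this page repeats: wrap an existing I420 buffer with vpx_img_wrap (no copy is made), submit it with vpx_codec_encode, then drain the compressed packets with vpx_codec_get_cx_data. A minimal sketch of that pattern in plain C, assuming an already-initialized encoder context and a caller-owned frame buffer (encode_one_frame, codec, and i420_buf are illustrative names, not from any of the quoted projects):

#include <vpx/vpx_encoder.h>

static int encode_one_frame(vpx_codec_ctx_t *codec, unsigned char *i420_buf,
                            unsigned int w, unsigned int h, vpx_codec_pts_t pts)
{
    vpx_image_t img;
    vpx_codec_iter_t iter = NULL;
    const vpx_codec_cx_pkt_t *pkt;

    /* No copy is made: img's plane pointers are derived from i420_buf. */
    if (vpx_img_wrap(&img, VPX_IMG_FMT_I420, w, h, 1, i420_buf) == NULL)
        return -1;

    if (vpx_codec_encode(codec, &img, pts, 1, 0, VPX_DL_REALTIME) != VPX_CODEC_OK)
        return -1;

    /* One encode call can yield several packets; drain them all. */
    while ((pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) {
        if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
            /* pkt->data.frame.buf / pkt->data.frame.sz hold the bitstream;
             * VPX_FRAME_IS_KEY in pkt->data.frame.flags marks keyframes. */
        }
    }
    return 0;
}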
static void enc_process(MSFilter *f) {
    mblk_t *im, *om;
    uint64_t timems = f->ticker->time;
    uint32_t timestamp = timems * 90;
    EncState *s = (EncState *)f->data;
    unsigned int flags = 0;
    vpx_codec_err_t err;
    YuvBuf yuv;

    ms_filter_lock(f);
    while ((im = ms_queue_get(f->inputs[0])) != NULL) {
        vpx_image_t img;

        om = NULL;
        flags = 0;

        ms_yuv_buf_init_from_mblk(&yuv, im);
        vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width, s->vconf.vsize.height, 1, yuv.planes[0]);

        if (video_starter_need_i_frame(&s->starter, f->ticker->time)) {
            /* sends an I frame at 2 seconds and 4 seconds after the beginning of the call */
            s->req_vfu = TRUE;
        }
        if (s->req_vfu) {
            flags = VPX_EFLAG_FORCE_KF;
            s->req_vfu = FALSE;
        }

        err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags, VPX_DL_REALTIME);
        if (err) {
            ms_error("vpx_codec_encode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
        } else {
            vpx_codec_iter_t iter = NULL;
            const vpx_codec_cx_pkt_t *pkt;

            s->frame_count++;
            if (s->frame_count == 1) {
                video_starter_first_frame(&s->starter, f->ticker->time);
            }

            while ((pkt = vpx_codec_get_cx_data(&s->codec, &iter))) {
                if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
                    if (pkt->data.frame.sz > 0) {
                        om = allocb(pkt->data.frame.sz, 0);
                        memcpy(om->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
                        om->b_wptr += pkt->data.frame.sz;
#ifdef FRAGMENT_ON_PARTITIONS
                        vp8_fragment_and_send(f, s, om, timestamp, pkt, (pkt->data.frame.partition_id == s->token_partition_count));
#else
                        vp8_fragment_and_send(f, s, om, timestamp, pkt, 1);
#endif
                    }
                }
            }
        }
        freemsg(im);
    }
    ms_filter_unlock(f);
}
/*
 * Method:    img_wrap
 */
JNIEXPORT void JNICALL
Java_org_jitsi_impl_neomedia_codec_video_VPX_img_1wrap
    (JNIEnv *env, jclass clazz,
     jlong img, jint fmt, jint d_w, jint d_h, jint align, jlong data)
{
    vpx_img_wrap((vpx_image_t *) (intptr_t) img,
                 (vpx_img_fmt_t) fmt,
                 (unsigned int) d_w,
                 (unsigned int) d_h,
                 (unsigned int) align,
                 (unsigned char *) (intptr_t) data);
}
static void enc_process(MSFilter *f) {
    mblk_t *im;
    uint64_t timems = f->ticker->time;
    uint32_t timestamp = (uint32_t)(timems * 90);
    EncState *s = (EncState *)f->data;
    unsigned int flags = 0;
    vpx_codec_err_t err;
    MSPicture yuv;
    bool_t is_ref_frame = FALSE;

    ms_filter_lock(f);

#ifdef AVPF_DEBUG
    ms_message("VP8 enc_process:");
#endif

    if (!s->ready) {
        ms_queue_flush(f->inputs[0]);
        ms_filter_unlock(f);
        return;
    }

    if ((im = ms_queue_peek_last(f->inputs[0])) != NULL) {
        vpx_image_t img;

        flags = 0;
        ms_yuv_buf_init_from_mblk(&yuv, im);
        vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width, s->vconf.vsize.height, 1, yuv.planes[0]);

        if ((s->avpf_enabled != TRUE) && ms_video_starter_need_i_frame(&s->starter, f->ticker->time)) {
            s->force_keyframe = TRUE;
        }
        if (s->force_keyframe == TRUE) {
            ms_message("Forcing vp8 key frame for filter [%p]", f);
            flags = VPX_EFLAG_FORCE_KF;
        } else if (s->avpf_enabled == TRUE) {
            if (s->frame_count == 0) s->force_keyframe = TRUE;
            enc_fill_encoder_flags(s, &flags);
        }

#ifdef AVPF_DEBUG
        ms_message("VP8 encoder frames state:");
        ms_message("\tgolden: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
            s->frames_state.golden.count, s->frames_state.golden.picture_id,
            (s->frames_state.golden.acknowledged == TRUE) ? "Y" : "N");
        ms_message("\taltref: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
            s->frames_state.altref.count, s->frames_state.altref.picture_id,
            (s->frames_state.altref.acknowledged == TRUE) ? "Y" : "N");
#endif

        err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags, 1000000LL / (2 * (int)s->vconf.fps)); /* encoder has half a framerate interval to encode */
        if (err) {
            ms_error("vpx_codec_encode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
        } else {
            vpx_codec_iter_t iter = NULL;
            const vpx_codec_cx_pkt_t *pkt;
            MSList *list = NULL;

            /* Update the frames state. */
            is_ref_frame = FALSE;
            if (flags & VPX_EFLAG_FORCE_KF) {
                enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
                enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
                s->frames_state.golden.is_independant = TRUE;
                s->frames_state.altref.is_independant = TRUE;
                s->frames_state.last_independent_frame = s->frame_count;
                s->force_keyframe = FALSE;
                is_ref_frame = TRUE;
            } else if (flags & VP8_EFLAG_FORCE_GF) {
                enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
                is_ref_frame = TRUE;
            } else if (flags & VP8_EFLAG_FORCE_ARF) {
                enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
                is_ref_frame = TRUE;
            } else if (flags & VP8_EFLAG_NO_REF_LAST) {
                enc_mark_reference_frame_as_sent(s, VP8_LAST_FRAME);
                is_ref_frame = is_reconstruction_frame_sane(s, flags);
            }
            if (is_frame_independent(flags)) {
                s->frames_state.last_independent_frame = s->frame_count;
            }

            /* Pack the encoded frame. */
            while ((pkt = vpx_codec_get_cx_data(&s->codec, &iter))) {
                if ((pkt->kind == VPX_CODEC_CX_FRAME_PKT) && (pkt->data.frame.sz > 0)) {
                    Vp8RtpFmtPacket *packet = ms_new0(Vp8RtpFmtPacket, 1);

                    packet->m = allocb(pkt->data.frame.sz, 0);
                    memcpy(packet->m->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
                    packet->m->b_wptr += pkt->data.frame.sz;
                    mblk_set_timestamp_info(packet->m, timestamp);
                    packet->pd = ms_new0(Vp8RtpFmtPayloadDescriptor, 1);
                    packet->pd->start_of_partition = TRUE;
                    packet->pd->non_reference_frame = s->avpf_enabled && !is_ref_frame;
                    if (s->avpf_enabled == TRUE) {
                        packet->pd->extended_control_bits_present = TRUE;
                        packet->pd->pictureid_present = TRUE;
                        packet->pd->pictureid = s->picture_id;
                    } else {
                        packet->pd->extended_control_bits_present = FALSE;
                        packet->pd->pictureid_present = FALSE;
                    }
                    if (s->flags & VPX_CODEC_USE_OUTPUT_PARTITION) {
                        packet->pd->pid = (uint8_t)pkt->data.frame.partition_id;
                        if (!(pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT)) {
                            mblk_set_marker_info(packet->m, TRUE);
                        }
                    } else {
                        packet->pd->pid = 0;
                        mblk_set_marker_info(packet->m, TRUE);
                    }
                    list = ms_list_append(list, packet);
                }
            }

#ifdef AVPF_DEBUG
            ms_message("VP8 encoder picture_id=%i ***| %s | %s | %s | %s", (int)s->picture_id,
                (flags & VPX_EFLAG_FORCE_KF) ? "KF " : (flags & VP8_EFLAG_FORCE_GF) ? "GF " : (flags & VP8_EFLAG_FORCE_ARF) ? "ARF" : " ",
                (flags & VP8_EFLAG_NO_REF_GF) ? "NOREFGF" : " ",
                (flags & VP8_EFLAG_NO_REF_ARF) ? "NOREFARF" : " ",
                (flags & VP8_EFLAG_NO_REF_LAST) ? "NOREFLAST" : " ");
#endif

            vp8rtpfmt_packer_process(&s->packer, list, f->outputs[0], f->factory);

            /* Handle video starter if AVPF is not enabled. */
            s->frame_count++;
            if ((s->avpf_enabled != TRUE) && (s->frame_count == 1)) {
                ms_video_starter_first_frame(&s->starter, f->ticker->time);
            }

            /* Increment the pictureID. */
            s->picture_id++;
#ifdef PICTURE_ID_ON_16_BITS
            if (s->picture_id == 0)
                s->picture_id = 0x8000;
#else
            if (s->picture_id == 0x0080)
                s->picture_id = 0;
#endif
        }
    }

    ms_filter_unlock(f);
    ms_queue_flush(f->inputs[0]);
}
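The packing loop above branches on VPX_CODEC_USE_OUTPUT_PARTITION, which changes what vpx_codec_get_cx_data returns: instead of one packet per frame, each partition arrives as its own VPX_CODEC_CX_FRAME_PKT carrying data.frame.partition_id, and VPX_FRAME_IS_FRAGMENT is cleared only on the frame's last partition (which is what the marker-bit logic keys on). A hedged sketch of how that mode is typically enabled at init time; open_partitioned_encoder is an illustrative name, and cfg is assumed to be already filled in, e.g. via vpx_codec_enc_config_default:

#include <vpx/vpx_encoder.h>
#include <vpx/vp8cx.h>

static int open_partitioned_encoder(vpx_codec_ctx_t *ctx, vpx_codec_enc_cfg_t *cfg)
{
    /* Request one cx_data packet per partition rather than per frame. */
    if (vpx_codec_enc_init(ctx, vpx_codec_vp8_cx(), cfg,
                           VPX_CODEC_USE_OUTPUT_PARTITION) != VPX_CODEC_OK)
        return -1;

    /* More token partitions means more, smaller packets per frame, which is
     * what an RTP packetizer like the one above typically wants. */
    vpx_codec_control(ctx, VP8E_SET_TOKEN_PARTITIONS, VP8_FOUR_TOKENPARTITION);
    return 0;
}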
static pj_status_t pj_vpx_encoder_open(vpx_private *vpx) {
    vpx_codec_flags_t flags = 0; /* XXX: use VPX_CODEC_USE_OUTPUT_PARTITION ? */
    const struct vpx_codec_iface *iface = &vpx_codec_vp8_cx_algo;
    struct vpx_codec_enc_cfg enccfg;
    int res;

    TRACE_((THIS_FILE, "vpx pj_vpx_encoder_open"));

    res = vpx_codec_enc_config_default(iface, &enccfg, 0);
    if (res != VPX_CODEC_OK) {
        PJ_LOG(1, (THIS_FILE, "Failed to get vpx default config : %s", vpx_codec_err_to_string(res)));
        return PJMEDIA_CODEC_EFAILED;
    }

    enccfg.g_w = vpx->param.enc_fmt.det.vid.size.w;
    enccfg.g_h = vpx->param.enc_fmt.det.vid.size.h;
    enccfg.g_timebase.num = vpx->param.enc_fmt.det.vid.fps.num;
    enccfg.g_timebase.den = vpx->param.enc_fmt.det.vid.fps.denum;

    //provide dummy value to initialize wrapper, values will be updated each _encode()
    vpx_img_wrap(&vpx->rawimg, VPX_IMG_FMT_I420,
                 vpx->param.enc_fmt.det.vid.size.w,
                 vpx->param.enc_fmt.det.vid.size.h,
                 1, NULL);

    enccfg.g_threads = number_of_threads(enccfg.g_w, enccfg.g_h, number_of_cores());
    PJ_LOG(4, (THIS_FILE, "Using %d threads for VPX encoding", enccfg.g_threads));
    enccfg.g_lag_in_frames = 0;
    enccfg.g_pass = VPX_RC_ONE_PASS;
    enccfg.rc_end_usage = VPX_CBR;
    enccfg.rc_target_bitrate = vpx->param.enc_fmt.det.vid.avg_bps / 1000; // in kbit/s
    enccfg.g_timebase.num = 1;
    enccfg.g_timebase.den = 90000;
    enccfg.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
    enccfg.rc_resize_allowed = 1;
    enccfg.rc_min_quantizer = 2;
    enccfg.rc_max_quantizer = 56;
    enccfg.rc_undershoot_pct = 100;
    enccfg.rc_overshoot_pct = 15;
    enccfg.rc_buf_initial_sz = 500;
    enccfg.rc_buf_optimal_sz = 600;
    enccfg.rc_buf_sz = 1000;
    enccfg.kf_mode = VPX_KF_AUTO;
    enccfg.kf_max_dist = 3000;

    vpx->rc_max_intra_target = PJ_MAX(300, enccfg.rc_buf_sz * 0.5 * enccfg.g_timebase.num / 10);

    res = vpx_codec_enc_init(&vpx->encoder, vpx_codec_vp8_cx(), &enccfg, flags);
    if (res != VPX_CODEC_OK) {
        PJ_LOG(1, (THIS_FILE, "Failed to init vpx encoder : %s", vpx_codec_err_to_string(res)));
        return PJMEDIA_CODEC_EFAILED;
    }

    vpx_codec_control(&vpx->encoder, VP8E_SET_STATIC_THRESHOLD, 1);
    vpx_codec_control(&vpx->encoder, VP8E_SET_CPUUSED, -6); // XXX: test
    vpx_codec_control(&vpx->encoder, VP8E_SET_TOKEN_PARTITIONS, VP8_ONE_TOKENPARTITION);
    vpx_codec_control(&vpx->encoder, VP8E_SET_MAX_INTRA_BITRATE_PCT, vpx->rc_max_intra_target);
#ifdef VP8E_SET_SCREEN_CONTENT_MODE
    vpx_codec_control(&vpx->encoder, VP8E_SET_SCREEN_CONTENT_MODE, 0);
#endif

    vpx->enc_iter = NULL;
    vpx->enc_buf_size = vpx->enc_vafp.framebytes;
    vpx->enc_buf = pj_pool_alloc(vpx->pool, vpx->enc_buf_size);
    vpx->dec_buf_size = vpx->dec_vafp.framebytes;
    vpx->dec_buf = pj_pool_alloc(vpx->pool, vpx->dec_buf_size);

    return PJ_SUCCESS;
}
static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t *data,
                                  unsigned int data_sz,
                                  void *user_priv,
                                  long deadline)
{
    vpx_codec_err_t res = VPX_CODEC_OK;

    ctx->img_avail = 0;

    /* Determine the stream parameters. Note that we rely on peek_si to
     * validate that we have a buffer that does not wrap around the top
     * of the heap.
     */
    if (!ctx->si.h)
        res = ctx->base.iface->dec.peek_si(data, data_sz, &ctx->si);

    /* Perform deferred allocations, if required */
    if (!res && ctx->defer_alloc)
    {
        int i;

        for (i = 1; !res && i < NELEMENTS(ctx->mmaps); i++)
        {
            vpx_codec_dec_cfg_t cfg;

            cfg.w = ctx->si.w;
            cfg.h = ctx->si.h;
            ctx->mmaps[i].id = vp8_mem_req_segs[i].id;
            ctx->mmaps[i].sz = vp8_mem_req_segs[i].sz;
            ctx->mmaps[i].align = vp8_mem_req_segs[i].align;
            ctx->mmaps[i].flags = vp8_mem_req_segs[i].flags;

            if (!ctx->mmaps[i].sz)
                ctx->mmaps[i].sz = vp8_mem_req_segs[i].calc_sz(&cfg, ctx->base.init_flags);

            res = vp8_mmap_alloc(&ctx->mmaps[i]);
        }

        if (!res)
            vp8_finalize_mmaps(ctx);

        ctx->defer_alloc = 0;
    }

    /* Initialize the decoder instance on the first frame */
    if (!res && !ctx->decoder_init)
    {
        res = vp8_validate_mmaps(&ctx->si, ctx->mmaps, ctx->base.init_flags);

        if (!res)
        {
            VP8D_CONFIG oxcf;
            VP8D_PTR optr;

            vp8dx_initialize();

            oxcf.Width = ctx->si.w;
            oxcf.Height = ctx->si.h;
            oxcf.Version = 9;
            oxcf.postprocess = 0;
            oxcf.max_threads = ctx->cfg.threads;

            optr = vp8dx_create_decompressor(&oxcf);

            /* If postprocessing was enabled by the application and a
             * configuration has not been provided, default it.
             */
            if (!ctx->postproc_cfg_set
                && (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC))
            {
                ctx->postproc_cfg.post_proc_flag = VP8_DEBLOCK | VP8_DEMACROBLOCK;
                ctx->postproc_cfg.deblocking_level = 4;
                ctx->postproc_cfg.noise_level = 0;
            }

            if (!optr)
                res = VPX_CODEC_ERROR;
            else
                ctx->pbi = optr;
        }

        ctx->decoder_init = 1;
    }

    if (!res && ctx->pbi)
    {
        YV12_BUFFER_CONFIG sd;
        INT64 time_stamp = 0, time_end_stamp = 0;
        vp8_ppflags_t flags = {0};

        if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)
        {
            flags.post_proc_flag = ctx->postproc_cfg.post_proc_flag
#if CONFIG_POSTPROC_VISUALIZER
                | ((ctx->dbg_color_ref_frame_flag != 0) ? VP8D_DEBUG_CLR_FRM_REF_BLKS : 0)
                | ((ctx->dbg_color_mb_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                | ((ctx->dbg_color_b_modes_flag != 0) ? VP8D_DEBUG_CLR_BLK_MODES : 0)
                | ((ctx->dbg_display_mv_flag != 0) ? VP8D_DEBUG_DRAW_MV : 0)
#endif
                ;
            flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
            flags.noise_level = ctx->postproc_cfg.noise_level;
#if CONFIG_POSTPROC_VISUALIZER
            flags.display_ref_frame_flag = ctx->dbg_color_ref_frame_flag;
            flags.display_mb_modes_flag = ctx->dbg_color_mb_modes_flag;
            flags.display_b_modes_flag = ctx->dbg_color_b_modes_flag;
            flags.display_mv_flag = ctx->dbg_display_mv_flag;
#endif
        }

        if (vp8dx_receive_compressed_data(ctx->pbi, data_sz, data, deadline))
        {
            VP8D_COMP *pbi = (VP8D_COMP *)ctx->pbi;
            res = update_error_state(ctx, &pbi->common.error);
        }

        if (!res && 0 == vp8dx_get_raw_frame(ctx->pbi, &sd, &time_stamp, &time_end_stamp, &flags))
        {
            /* Align width/height */
            unsigned int a_w = (sd.y_width + 15) & ~15;
            unsigned int a_h = (sd.y_height + 15) & ~15;

            vpx_img_wrap(&ctx->img, VPX_IMG_FMT_I420,
                         a_w + 2 * VP8BORDERINPIXELS,
                         a_h + 2 * VP8BORDERINPIXELS,
                         1, sd.buffer_alloc);
            vpx_img_set_rect(&ctx->img,
                             VP8BORDERINPIXELS, VP8BORDERINPIXELS,
                             sd.y_width, sd.y_height);
            ctx->img.user_priv = user_priv;
            ctx->img_avail = 1;
        }
    }

    return res;
}
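This libvpx-internal decoder only wraps its own frame store into ctx->img; an application retrieves that image through the public API. A minimal sketch of the caller's side, assuming an already-opened decoder context and that buf/sz hold one complete compressed frame (decode_one_frame is an illustrative name):

#include <stddef.h>
#include <stdint.h>
#include <vpx/vpx_decoder.h>

static const vpx_image_t *decode_one_frame(vpx_codec_ctx_t *decoder,
                                           const uint8_t *buf, size_t sz)
{
    vpx_codec_iter_t iter = NULL;

    if (vpx_codec_decode(decoder, buf, (unsigned int)sz, NULL, 0) != VPX_CODEC_OK)
        return NULL;

    /* Returns the vpx_image_t the decoder wrapped around its internal frame
     * buffer; the pixels stay valid only until the next decode call. */
    return vpx_codec_get_frame(decoder, &iter);
}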
int32_t kr_vpx_encode(krad_vpx_encoder_t *vpx, kr_codeme_t *codeme, kr_medium_t *medium) {
  int ret;
  vpx_image_t image;
  vpx_codec_iter_t iter;
  //vpx_codec_err_t err;
  //vpx_codec_cx_pkt_t *pkt;

  memset(&image, 0, sizeof(vpx_image_t));
  ret = 0;

  if (vpx->update_config == 1) {
    krad_vpx_encoder_config_set(vpx, &vpx->cfg);
    vpx->update_config = 0;
    krad_vpx_encoder_print_config(vpx);
  }

  if (medium == NULL) {
    /* Flush: a NULL image drains any pending frames from the encoder. */
    ret = vpx_codec_encode(&vpx->encoder, NULL, vpx->frames, 1, vpx->flags, vpx->deadline);
  } else {
    vpx_img_wrap(&image, VPX_IMG_FMT_I420, vpx->width, vpx->height, 1, medium->data);
    //image.w = vpx->width;
    //image.h = vpx->height;
    //image.d_w = vpx->width;
    //image.d_h = vpx->height;
    //image.planes[0] = medium->v.ppx[0];
    //image.planes[1] = medium->v.ppx[1];
    //image.planes[2] = medium->v.ppx[2];
    image.stride[0] = medium->v.pps[0];
    image.stride[1] = medium->v.pps[1];
    image.stride[2] = medium->v.pps[2];
    ret = vpx_codec_encode(&vpx->encoder, &image, vpx->frames, 1, vpx->flags, vpx->deadline);
  }

  if (ret != 0) {
    printke("vpx_codec_encode failed");
  }

  vpx->frames++;
  vpx->flags = 0;

  iter = NULL;
  vpx->pkt = vpx_codec_get_cx_data(&vpx->encoder, &iter);
  if (vpx->pkt != NULL) {
    if (vpx->pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
      codeme->sz = vpx->pkt->data.frame.sz;
      memcpy(codeme->data, vpx->pkt->data.frame.buf, codeme->sz);
      codeme->key = vpx->pkt->data.frame.flags & VPX_FRAME_IS_KEY;
      if (codeme->key == 0) {
        vpx->frames_since_keyframe++;
      } else {
        vpx->frames_since_keyframe = 0;
      }
      return 1;
    }
  }

  return ret;
}
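The krad example wraps medium->data as if it were one contiguous I420 buffer and then overwrites the strides; the commented-out medium->v.ppx[] lines hint at the case where the three planes live in separate buffers, which needs the plane pointers patched as well. A hedged sketch of that fuller pattern (wrap_planar is an illustrative helper, not part of krad):

#include <vpx/vpx_image.h>

static void wrap_planar(vpx_image_t *img, unsigned int w, unsigned int h,
                        unsigned char *planes[3], int strides[3])
{
    /* vpx_img_wrap computes plane pointers assuming one contiguous buffer;
     * for separately allocated planes, override them after wrapping. */
    vpx_img_wrap(img, VPX_IMG_FMT_I420, w, h, 1, planes[0]);
    img->planes[VPX_PLANE_Y] = planes[0];
    img->planes[VPX_PLANE_U] = planes[1];
    img->planes[VPX_PLANE_V] = planes[2];
    img->stride[VPX_PLANE_Y] = strides[0];
    img->stride[VPX_PLANE_U] = strides[1];
    img->stride[VPX_PLANE_V] = strides[2];
}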
static pj_status_t pj_vpx_encoder_open(vpx_private *vpx) {
    vpx_codec_flags_t flags = 0;
    const struct vpx_codec_iface *iface = &vpx_codec_vp8_cx_algo;
    struct vpx_codec_enc_cfg enccfg;
    int cpu_speed, rc_max_intra_target;
    int res;

    PJ_LOG(4, (THIS_FILE, "vpx pj_vpx_encoder_open"));

    res = vpx_codec_enc_config_default(iface, &enccfg, 0);
    if (res != VPX_CODEC_OK) {
        PJ_LOG(1, (THIS_FILE, "Failed to get vpx default config : %s", vpx_codec_err_to_string(res)));
        return PJMEDIA_CODEC_EFAILED;
    }

    enccfg.g_w = vpx->param.enc_fmt.det.vid.size.w;
    enccfg.g_h = vpx->param.enc_fmt.det.vid.size.h;
    enccfg.g_timebase.num = vpx->param.enc_fmt.det.vid.fps.num;
    enccfg.g_timebase.den = vpx->param.enc_fmt.det.vid.fps.denum;

    /* Provide a dummy value to initialize the wrapper; values will be updated on each _encode() */
    vpx_img_wrap(&vpx->rawimg, VPX_IMG_FMT_I420,
                 vpx->param.enc_fmt.det.vid.size.w,
                 vpx->param.enc_fmt.det.vid.size.h,
                 1, (unsigned char *)1);

    /* The following config is taken from the webRTC project.
     * vpx seems to support more configurable/complex settings.
     * Could be interesting to have a look, but for now consider
     * the webRTC settings as optimized for our needs. */
#if defined(PJ_HAS_UNISTD_H) && PJ_HAS_UNISTD_H != 0
    if (enccfg.g_w * enccfg.g_h > 704 * 576 && sysconf(_SC_NPROCESSORS_CONF) > 1) {
        /* 2 threads when larger than 4CIF */
        enccfg.g_threads = 2;
    } else
#endif
    {
        enccfg.g_threads = 1;
    }
    enccfg.g_lag_in_frames = 0;
    enccfg.g_pass = VPX_RC_ONE_PASS;
    enccfg.rc_end_usage = VPX_CBR;
    enccfg.rc_target_bitrate = vpx->param.enc_fmt.det.vid.avg_bps / 1000; /* in kbit/s */
    enccfg.g_timebase.num = 1;
    enccfg.g_timebase.den = 90000;
    /* TODO : need a setting for the 2 following? */
    enccfg.g_error_resilient = 0;
    enccfg.rc_resize_allowed = 1;
    enccfg.rc_min_quantizer = 2;
    enccfg.rc_max_quantizer = 56;
    enccfg.rc_undershoot_pct = 100;
    enccfg.rc_overshoot_pct = 15;
    enccfg.rc_buf_initial_sz = 500;
    enccfg.rc_buf_optimal_sz = 600;
    enccfg.rc_buf_sz = 1000;
    /* Do not use feedback_mode */
    enccfg.kf_mode = VPX_KF_AUTO;
    enccfg.kf_max_dist = 3000;

    /* scalePar = 0.5; maxFrameRate = 30fps */
    /* Don't go below 3 times the per frame bandwidth. */
    rc_max_intra_target = PJ_MAX(300, enccfg.rc_buf_sz * 0.5 * 30 / 10);

    /* For Android, cpu speed is set to -12.
     * TODO : adjust for other platforms as done in webRTC */
    cpu_speed = -12;

    //flags |= VPX_CODEC_USE_OUTPUT_PARTITION;

    res = vpx_codec_enc_init(&vpx->encoder, vpx_codec_vp8_cx(), &enccfg, flags);
    if (res != VPX_CODEC_OK) {
        PJ_LOG(1, (THIS_FILE, "Failed to init vpx encoder : %s", vpx_codec_err_to_string(res)));
        return PJMEDIA_CODEC_EFAILED;
    }

    vpx_codec_control(&vpx->encoder, VP8E_SET_STATIC_THRESHOLD, 1);
    vpx_codec_control(&vpx->encoder, VP8E_SET_CPUUSED, cpu_speed);
    vpx_codec_control(&vpx->encoder, VP8E_SET_TOKEN_PARTITIONS, VP8_ONE_TOKENPARTITION);
    vpx_codec_control(&vpx->encoder, VP8E_SET_NOISE_SENSITIVITY, 1);
    vpx_codec_control(&vpx->encoder, VP8E_SET_MAX_INTRA_BITRATE_PCT, rc_max_intra_target);

    vpx->enc_iter = NULL;
    vpx->enc_buf_size = vpx->enc_vafp.framebytes;
    vpx->enc_buf = pj_pool_alloc(vpx->pool, vpx->enc_buf_size);
    vpx->dec_buf_size = vpx->dec_vafp.framebytes;
    vpx->dec_buf = pj_pool_alloc(vpx->pool, vpx->dec_buf_size);

    return PJ_SUCCESS;
}
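Both pjmedia variants prime the wrapper with a placeholder data pointer (NULL in one, (unsigned char*)1 here) and patch the plane pointers before each encode. When no caller-owned buffer exists at all, the alternative is to let libvpx own the pixel memory. A minimal sketch, with make_owned_i420 as an illustrative name:

#include <vpx/vpx_image.h>

static vpx_image_t *make_owned_i420(unsigned int w, unsigned int h)
{
    /* vpx_img_alloc allocates both the descriptor (first arg NULL) and the
     * pixels; align=16 keeps plane strides SIMD-friendly. Release the image
     * with vpx_img_free when done. */
    return vpx_img_alloc(NULL, VPX_IMG_FMT_I420, w, h, 16);
}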
nsresult
VP8TrackEncoder::Init(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
                      int32_t aDisplayHeight, TrackRate aTrackRate)
{
  if (aWidth < 1 || aHeight < 1 || aDisplayWidth < 1 || aDisplayHeight < 1
      || aTrackRate <= 0) {
    return NS_ERROR_FAILURE;
  }

  ReentrantMonitorAutoEnter mon(mReentrantMonitor);

  mTrackRate = aTrackRate;
  mEncodedFrameRate = DEFAULT_ENCODE_FRAMERATE;
  mEncodedFrameDuration = mTrackRate / mEncodedFrameRate;
  mFrameWidth = aWidth;
  mFrameHeight = aHeight;
  mDisplayWidth = aDisplayWidth;
  mDisplayHeight = aDisplayHeight;

  // Encoder configuration structure.
  vpx_codec_enc_cfg_t config;
  memset(&config, 0, sizeof(vpx_codec_enc_cfg_t));
  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &config, 0)) {
    return NS_ERROR_FAILURE;
  }

  // Creating a wrapper to the image - setting image data to NULL. Actual
  // pointer will be set in encode. Setting align to 1, as it is meaningless
  // (actual memory is not allocated).
  vpx_img_wrap(mVPXImageWrapper, IMG_FMT_I420,
               mFrameWidth, mFrameHeight, 1, nullptr);

  config.g_w = mFrameWidth;
  config.g_h = mFrameHeight;
  // TODO: Maybe we should have various aFrameRate bitrate pairs for each device?
  // or for different platforms
  config.rc_target_bitrate = DEFAULT_BITRATE; // in kbit/s

  // Setting the time base of the codec
  config.g_timebase.num = 1;
  config.g_timebase.den = mTrackRate;

  config.g_error_resilient = 0;
  config.g_lag_in_frames = 0; // 0 - no frame lagging

  int32_t number_of_cores = PR_GetNumberOfProcessors();
  if (mFrameWidth * mFrameHeight > 1280 * 960 && number_of_cores >= 6) {
    config.g_threads = 3; // 3 threads for 1080p.
  } else if (mFrameWidth * mFrameHeight > 640 * 480 && number_of_cores >= 3) {
    config.g_threads = 2; // 2 threads for qHD/HD.
  } else {
    config.g_threads = 1; // 1 thread for VGA or less.
  }

  // rate control settings
  config.rc_dropframe_thresh = 0;
  config.rc_end_usage = VPX_CBR;
  config.g_pass = VPX_RC_ONE_PASS;
  config.rc_resize_allowed = 1;
  config.rc_undershoot_pct = 100;
  config.rc_overshoot_pct = 15;
  config.rc_buf_initial_sz = 500;
  config.rc_buf_optimal_sz = 600;
  config.rc_buf_sz = 1000;

  config.kf_mode = VPX_KF_AUTO;
  // Ensure that we can output one I-frame per second.
  config.kf_max_dist = mEncodedFrameRate;

  vpx_codec_flags_t flags = 0;
  flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
  if (vpx_codec_enc_init(mVPXContext, vpx_codec_vp8_cx(), &config, flags)) {
    return NS_ERROR_FAILURE;
  }

  vpx_codec_control(mVPXContext, VP8E_SET_STATIC_THRESHOLD, 1);
  vpx_codec_control(mVPXContext, VP8E_SET_CPUUSED, -6);
  vpx_codec_control(mVPXContext, VP8E_SET_TOKEN_PARTITIONS,
                    VP8_ONE_TOKENPARTITION);

  mInitialized = true;
  mon.NotifyAll();

  return NS_OK;
}
}

int VPXEncoder::InitDecoder()
{
    _vpxDecoder = new vpx_codec_ctx_t();

    /* Initialize decoder */
    if (vpx_codec_dec_init(_vpxDecoder, (vpx_codec_vp8_dx()), NULL, 0)) {
        printf("Failed to initialize libvpx decoder.\n");
        return -1;
    }

    return 0; // The original fell off the end of a non-void function on success.
}

int VPXEncoder::Encode(unsigned char * i420, int i420Length, int sampleCount, array<Byte> ^% buffer)
{
    vpx_image_t *const img = vpx_img_wrap(_rawImage, VPX_IMG_FMT_I420, _width, _height, 1, i420);

    const vpx_codec_cx_pkt_t *pkt;
    vpx_enc_frame_flags_t flags = 0;

    if (vpx_codec_encode(_vpxCodec, _rawImage, sampleCount, 1, flags, VPX_DL_REALTIME)) {
        printf("VPX codec failed to encode the frame.\n");
        return -1;
    }
    else {
        vpx_codec_iter_t iter = NULL;

        while ((pkt = vpx_codec_get_cx_data(_vpxCodec, &iter))) {
            switch (pkt->kind) {
            case VPX_CODEC_CX_FRAME_PKT:
                //vpkt = const_cast<vpx_codec_cx_pkt_t **>(&pkt);
                /* The listing is truncated here; a plausible completion,
                 * assuming the compressed frame is marshalled into the
                 * managed out-parameter, following the pattern of the
                 * Media Foundation example above. */
                buffer = gcnew array<Byte>((int)pkt->data.frame.sz);
                System::Runtime::InteropServices::Marshal::Copy(
                    IntPtr(pkt->data.frame.buf), buffer, 0, (int)pkt->data.frame.sz);
                break;
            default:
                break;
            }
        }
    }

    vpx_img_free(img);

    return 0;
}