// Reads one video sample from the Media Foundation source reader, encodes it
// with libvpx and, on success, points vpkt at the encoder's compressed-frame
// packet. Returns S_OK on success, E_FAIL when no sample was available or the
// encoder rejected the frame; CHECK_HR propagates read errors.
// NOTE: vpkt aliases memory owned by the encoder and is only valid until the
// next vpx_codec_encode()/vpx_codec_destroy() call.
HRESULT GetSampleFromMFStreamer(/* out */ const vpx_codec_cx_pkt_t *& vpkt)
{
    IMFSample *videoSample = NULL;

    // Initial read may legitimately produce a NULL sample (stream gap/tick).
    CHECK_HR(videoReader->ReadSample(
        MF_SOURCE_READER_ANY_STREAM, // Stream index.
        0,                           // Flags.
        &streamIndex,                // Receives the actual stream index.
        &flags,                      // Receives status flags.
        &llVideoTimeStamp,           // Receives the time stamp.
        &videoSample                 // Receives the sample or NULL.
        ), L"Error reading video sample.");

    if (!videoSample) {
        printf("Failed to get video sample from MF.\n");
        // BUGFIX: the original fell off the end of a non-void function on this
        // path (undefined behavior). Report failure explicitly.
        return E_FAIL;
    }

    HRESULT hr = S_OK;

    DWORD nCurrBufferCount = 0;
    CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n");

    IMFMediaBuffer * pMediaBuffer = NULL;
    CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n");

    DWORD nCurrLen = 0;
    CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n");

    byte *imgBuff = NULL;
    DWORD buffCurrLen = 0;
    DWORD buffMaxLen = 0;
    pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen);

    // Wrap the locked buffer directly into the pre-allocated raw image.
    // Assumes the reader delivers frames already in VIDEO_INPUT_FORMAT at
    // _vpxConfig.g_w x _vpxConfig.g_h -- TODO confirm upstream media type.
    vpx_image_t* const img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT,
                                          _vpxConfig.g_w, _vpxConfig.g_h, 1, imgBuff);

    const vpx_codec_cx_pkt_t * pkt;
    // Renamed from `flags`: the original shadowed the outer `flags` variable
    // that ReadSample() writes into above.
    vpx_enc_frame_flags_t encFlags = 0;

    if (vpx_codec_encode(&_vpxCodec, &_rawImage, _sampleCount, 1, encFlags, VPX_DL_REALTIME)) {
        printf("VPX codec failed to encode the frame.\n");
        // BUGFIX: was `return -1;`, which skipped Unlock/Release and leaked the
        // locked media buffer and the sample. Fall through to the cleanup below.
        hr = E_FAIL;
    }
    else {
        vpx_codec_iter_t iter = NULL;
        while ((pkt = vpx_codec_get_cx_data(&_vpxCodec, &iter))) {
            switch (pkt->kind) {
            case VPX_CODEC_CX_FRAME_PKT:
                vpkt = pkt;
                break;
            default:
                break;
            }
            // BUGFIX: frame.sz is a size_t; "%i" was a format-specifier
            // mismatch (undefined behavior). Cast explicitly instead.
            printf("%s %u\n",
                   pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? "K" : ".",
                   (unsigned)pkt->data.frame.sz);
        }
    }

    _sampleCount++;
    vpx_img_free(img);
    pMediaBuffer->Unlock();
    pMediaBuffer->Release();
    videoSample->Release();
    return hr;
}
bool WebmExporter::add_frame(const vpx_image* data) { auto result = vpx_codec_encode(&_codec, data, _frame_index++, 1, 0, _settings.quality <= 1 ? VPX_DL_BEST_QUALITY : VPX_DL_GOOD_QUALITY); if (result != VPX_CODEC_OK) { codec_error("couldn't encode frame"); return false; } vpx_codec_iter_t iter = nullptr; const vpx_codec_cx_pkt_t* packet = nullptr; bool found_packet = false; while (packet = vpx_codec_get_cx_data(&_codec, &iter)) { found_packet = true; if (packet->kind != VPX_CODEC_CX_FRAME_PKT) { continue; } auto timestamp_ns = 1000000000 * packet->data.frame.pts / _settings.fps; bool result = _segment.AddFrame((uint8_t*) packet->data.frame.buf, packet->data.frame.sz, _video_track, timestamp_ns, packet->data.frame.flags & VPX_FRAME_IS_KEY); if (!result) { std::cerr << "couldn't add frame" << std::endl; return false; } } return found_packet; };
/* Encode a single frame (img == NULL flushes the encoder) and write every
 * compressed packet to the video writer, printing "K" for keyframes and "."
 * otherwise. Returns non-zero while the encoder produced packets, zero once
 * it is fully drained. */
static int encode_frame(vpx_codec_ctx_t *ctx, const vpx_image_t *img,
                        vpx_codec_pts_t pts, unsigned int duration,
                        vpx_enc_frame_flags_t flags, unsigned int deadline,
                        VpxVideoWriter *writer) {
  int produced = 0;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *cx_pkt;

  if (vpx_codec_encode(ctx, img, pts, duration, flags, deadline) != VPX_CODEC_OK)
    die_codec(ctx, "Failed to encode frame.");

  for (cx_pkt = vpx_codec_get_cx_data(ctx, &iter); cx_pkt != NULL;
       cx_pkt = vpx_codec_get_cx_data(ctx, &iter)) {
    produced = 1;
    if (cx_pkt->kind != VPX_CODEC_CX_FRAME_PKT) continue;

    if (!vpx_video_writer_write_frame(writer, cx_pkt->data.frame.buf,
                                      cx_pkt->data.frame.sz,
                                      cx_pkt->data.frame.pts))
      die_codec(ctx, "Failed to write compressed frame.");

    printf((cx_pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0 ? "K" : ".");
    fflush(stdout);
  }

  return produced;
}
/**
 * @brief Encode video frame
 *
 * @param av Handler
 * @param dest Where to
 * @param dest_max Max size
 * @param input What to encode
 * @return int
 * @retval ToxAvError On error.
 * @retval >0 On success
 */
inline__ int toxav_prepare_video_frame(ToxAv *av, int32_t call_index, uint8_t *dest,
                                       int dest_max, vpx_image_t *input)
{
    /* Reject operations on calls that are not currently active. */
    if (cii(call_index, av->msi_session) || !av->calls[call_index].call_active) {
        LOGGER_WARNING("Action on inactive call: %d", call_index);
        return ErrorNoCall;
    }

    CallSpecific *call = &av->calls[call_index];
    /* Follow the capture resolution if it changed since the last frame. */
    reconfigure_video_encoder_resolution(call->cs, input->d_w, input->d_h);

    int rc = vpx_codec_encode(&call->cs->v_encoder, input, call->cs->frame_counter, 1, 0, MAX_ENCODE_TIME_US);

    if ( rc != VPX_CODEC_OK) {
        LOGGER_ERROR("Could not encode video frame: %s\n", vpx_codec_err_to_string(rc));
        return ErrorInternal;
    }

    ++call->cs->frame_counter;

    vpx_codec_iter_t iter = NULL;
    const vpx_codec_cx_pkt_t *pkt;
    int copied = 0;

    /* Concatenate all compressed packets for this frame into dest. */
    while ( (pkt = vpx_codec_get_cx_data(&call->cs->v_encoder, &iter)) ) {
        if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
            /* BUGFIX: the original compared `copied + sz > dest_max`, which
             * promotes dest_max to size_t; a negative dest_max would wrap to a
             * huge value and pass the check. Keep the comparison unsigned only
             * after validating the remaining space. */
            if (dest_max < copied || pkt->data.frame.sz > (size_t)(dest_max - copied))
                return ErrorPacketTooLarge;

            memcpy(dest + copied, pkt->data.frame.buf, pkt->data.frame.sz);
            copied += pkt->data.frame.sz;
        }
    }

    return copied;
}
// Encoding of one frame int LumaEncoder::encode_frame_vpx(vpx_codec_ctx_t *codec, vpx_image_t *img, int frame_index, int flags) { int got_pkts = 0; vpx_codec_iter_t iter = NULL; const vpx_codec_cx_pkt_t *pkt = NULL; const vpx_codec_err_t res = vpx_codec_encode(codec, img, frame_index, 1, flags, VPX_DL_GOOD_QUALITY); // VPX_DL_GOOD_QUALITY/VPX_DL_REALTIME if (res != VPX_CODEC_OK) fprintf(stderr, "Failed to encode frame\n"); while ((pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) { got_pkts = 1; if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) { const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0; m_writer.addFrame((const uint8_t*)pkt->data.frame.buf, pkt->data.frame.sz, keyframe); //fprintf(stderr, keyframe ? "K" : "."); fflush(stdout); } } return got_pkts; }
/* Run one first-pass encode (img == NULL flushes) and append every two-pass
 * stats packet the encoder emits to the caller's growable stats buffer.
 * Returns non-zero while packets were produced, zero once drained. */
static int get_frame_stats(vpx_codec_ctx_t *ctx, const vpx_image_t *img,
                           vpx_codec_pts_t pts, unsigned int duration,
                           vpx_enc_frame_flags_t flags, unsigned int deadline,
                           vpx_fixed_buf_t *stats) {
  int got_pkts = 0;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt = NULL;
  const vpx_codec_err_t res =
      vpx_codec_encode(ctx, img, pts, duration, flags, deadline);
  if (res != VPX_CODEC_OK) die_codec(ctx, "Failed to get frame stats.");

  while ((pkt = vpx_codec_get_cx_data(ctx, &iter)) != NULL) {
    got_pkts = 1;

    if (pkt->kind == VPX_CODEC_STATS_PKT) {
      const uint8_t *const pkt_buf = pkt->data.twopass_stats.buf;
      const size_t pkt_size = pkt->data.twopass_stats.sz;
      /* BUGFIX: the original wrote `stats->buf = realloc(stats->buf, ...)`,
       * which leaks the old buffer and then dereferences NULL in memcpy when
       * realloc fails. Check the result before committing it. */
      void *grown = realloc(stats->buf, stats->sz + pkt_size);
      if (grown == NULL) die_codec(ctx, "Failed to grow stats buffer.");
      stats->buf = grown;
      memcpy((uint8_t *)stats->buf + stats->sz, pkt_buf, pkt_size);
      stats->sz += pkt_size;
    }
  }

  return got_pkts;
}
/*
 * Method: codec_encode
 * Points the vpx_image planes at slices of the caller-supplied Java byte
 * array, runs a single vpx_codec_encode() call, and releases the pinned
 * array without copy-back. Returns the libvpx error code as a jint.
 */
JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_codec_video_VPX_codec_1encode
    (JNIEnv *env, jclass clazz, jlong context, jlong jimg, jbyteArray bufArray,
     jint offset0, jint offset1, jint offset2,
     jlong pts, jlong duration, jlong flags, jlong deadline)
{
    unsigned char *pixels
        = (unsigned char *) (*env)->GetByteArrayElements(env, bufArray, NULL);
    vpx_image_t *img = (vpx_image_t *) (intptr_t) jimg;

    /* Wire the three planes into the pinned Java buffer; no alpha plane. */
    img->planes[0] = pixels + offset0;
    img->planes[1] = pixels + offset1;
    img->planes[2] = pixels + offset2;
    img->planes[3] = 0;

    jint ret = (jint) vpx_codec_encode(
        (vpx_codec_ctx_t *) (intptr_t) context,
        img,
        (vpx_codec_pts_t) pts,
        (unsigned long) duration,
        (vpx_enc_frame_flags_t) flags,
        (unsigned long) deadline);

    /* JNI_ABORT: the encoder only read from the buffer, nothing to copy back. */
    (*env)->ReleaseByteArrayElements(env, bufArray, (jbyte *) pixels, JNI_ABORT);

    return ret;
}
/* Filter process callback: drains all queued raw I420 frames from input 0,
 * encodes each with VP8 in realtime mode, and fragments/sends every
 * compressed packet as RTP. The RTP timestamp is derived from the ticker
 * time in ms scaled to the 90 kHz RTP video clock. */
static void enc_process(MSFilter *f) {
  mblk_t *im,*om;
  uint64_t timems=f->ticker->time;
  /* 90 kHz RTP clock: 90 ticks per millisecond. */
  uint32_t timestamp=timems*90;
  EncState *s=(EncState*)f->data;
  unsigned int flags = 0;
  vpx_codec_err_t err;
  YuvBuf yuv;
  ms_filter_lock(f);
  while((im=ms_queue_get(f->inputs[0]))!=NULL){
    vpx_image_t img;
    om = NULL;
    flags = 0;
    ms_yuv_buf_init_from_mblk(&yuv, im);
    /* Wrap the queued YUV buffer without copying it. */
    vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width, s->vconf.vsize.height, 1, yuv.planes[0]);
    if (video_starter_need_i_frame (&s->starter,f->ticker->time)){
      /*sends an I frame at 2 seconds and 4 seconds after the beginning of the call*/
      s->req_vfu=TRUE;
    }
    if (s->req_vfu){
      /* A video fast-update was requested: force a keyframe on this frame. */
      flags = VPX_EFLAG_FORCE_KF;
      s->req_vfu=FALSE;
    }
    err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags, VPX_DL_REALTIME);
    if (err) {
      ms_error("vpx_codec_encode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
    } else {
      vpx_codec_iter_t iter = NULL;
      const vpx_codec_cx_pkt_t *pkt;
      s->frame_count++;
      if (s->frame_count==1){
        /* Arm the I-frame scheduler relative to the first encoded frame. */
        video_starter_first_frame (&s->starter,f->ticker->time);
      }
      /* Copy each compressed packet into a fresh mblk and send it out. */
      while( (pkt = vpx_codec_get_cx_data(&s->codec, &iter)) ) {
        if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
          if (pkt->data.frame.sz > 0) {
            om = allocb(pkt->data.frame.sz,0);
            memcpy(om->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
            om->b_wptr += pkt->data.frame.sz;
#ifdef FRAGMENT_ON_PARTITIONS
            /* Last-partition flag marks the end-of-frame for the packetizer. */
            vp8_fragment_and_send(f, s, om, timestamp, pkt, (pkt->data.frame.partition_id == s->token_partition_count));
#else
            vp8_fragment_and_send(f, s, om, timestamp, pkt, 1);
#endif
          }
        }
      }
    }
    freemsg(im);
  }
  ms_filter_unlock(f);
}
/* Encode one image (NULL flushes the encoder) and write each compressed
 * frame to the output writer, echoing "K" for keyframes and "." otherwise.
 * Returns 1 if any packet came out of the encoder, else 0. */
static int encode_frame(vpx_codec_ctx_t *codec, vpx_image_t *img,
                        int frame_index, VpxVideoWriter *writer) {
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *cx_pkt;
  int produced = 0;

  if (vpx_codec_encode(codec, img, frame_index, 1, 0,
                       VPX_DL_GOOD_QUALITY) != VPX_CODEC_OK)
    die_codec(codec, "Failed to encode frame");

  while ((cx_pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) {
    produced = 1;
    if (cx_pkt->kind != VPX_CODEC_CX_FRAME_PKT) continue;

    if (!vpx_video_writer_write_frame(writer, cx_pkt->data.frame.buf,
                                      cx_pkt->data.frame.sz,
                                      cx_pkt->data.frame.pts)) {
      die_codec(codec, "Failed to write compressed frame");
    }
    printf((cx_pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0 ? "K" : ".");
    fflush(stdout);
  }

  return produced;
}
/**
 * @brief Encode and send video packet.
 *
 * @param av Handler.
 * @param input The packet.
 * @return int
 * @retval 0 Success.
 * @retval ToxAvError On error.
 */
inline__ int toxav_send_video ( ToxAv *av, vpx_image_t *input)
{
    int rc = vpx_codec_encode(&av->cs->v_encoder, input,
                              av->cs->frame_counter, 1, 0, MAX_ENCODE_TIME_US);
    if (rc != VPX_CODEC_OK) {
        fprintf(stderr, "Could not encode video frame\n");
        return ErrorInternal;
    }
    ++av->cs->frame_counter;

    /* Ship every compressed packet for this frame over RTP. */
    vpx_codec_iter_t iter = NULL;
    const vpx_codec_cx_pkt_t *packet;
    int sent = 0;

    while ((packet = vpx_codec_get_cx_data(&av->cs->v_encoder, &iter)) != NULL) {
        if (packet->kind != VPX_CODEC_CX_FRAME_PKT)
            continue;
        if (toxav_send_rtp_payload(av, TypeVideo, packet->data.frame.buf,
                                   packet->data.frame.sz) != -1)
            ++sent;
    }

    /* Success only when at least one packet actually went out. */
    return sent > 0 ? 0 : ErrorInternal;
}
/**
 * @brief Encode video frame
 *
 * @param av Handler
 * @param dest Where to
 * @param dest_max Max size
 * @param input What to encode
 * @return int
 * @retval ToxAvError On error.
 * @retval >0 On success
 */
inline__ int toxav_prepare_video_frame(ToxAv *av, int32_t call_index, uint8_t *dest,
                                       int dest_max, vpx_image_t *input)
{
    CallSpecific *call = &av->calls[call_index];

    int rc = vpx_codec_encode(&call->cs->v_encoder, input, call->cs->frame_counter, 1, 0, MAX_ENCODE_TIME_US);

    if ( rc != VPX_CODEC_OK) {
        fprintf(stderr, "Could not encode video frame: %s\n", vpx_codec_err_to_string(rc));
        return ErrorInternal;
    }

    ++call->cs->frame_counter;

    vpx_codec_iter_t iter = NULL;
    const vpx_codec_cx_pkt_t *pkt;
    int copied = 0;

    /* Concatenate all compressed packets for this frame into dest. */
    while ( (pkt = vpx_codec_get_cx_data(&call->cs->v_encoder, &iter)) ) {
        if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
            if ( copied + pkt->data.frame.sz > dest_max )
                return ErrorPacketTooLarge;

            /* BUGFIX: was mempcpy(), a GNU-only extension (likely a typo for
             * memcpy). The return value was unused, so memcpy is an exact,
             * portable replacement. */
            memcpy(dest + copied, pkt->data.frame.buf, pkt->data.frame.sz);
            copied += pkt->data.frame.sz;
        }
    }

    return copied;
}
/* JNI: encode one Android bitmap into length-prefixed VP8 packets.
 * Layout written into `buffer`: [int frame_size][frame bytes], repeated.
 * Returns the total number of bytes written (0 on lock/encode failure or
 * when the output buffer cannot hold the next packet). */
JNIEXPORT jint Java_ryulib_VideoZip_VPX_EncodeBitmap(JNIEnv* env, jclass clazz, jint handle, jobject bitmap, jbyteArray buffer, jint bufferSize, jint deadline)
{
    /* NOTE(review): jint -> pointer cast truncates on 64-bit ABIs; the handle
     * should travel as jlong -- confirm target platforms. */
    RyuVPX *pHandle = (RyuVPX *) handle;

    jbyte *pByteBuffer = (*env)->GetByteArrayElements(env, buffer, 0);

    int packet_size = 0;
    /* NOTE(review): frame_cnt is always 0 here, so every call encodes with
     * pts 0 -- confirm the encoder configuration tolerates constant pts. */
    int frame_cnt = 0;
    int flags = 0;

    /* Map the Java-side deadline enum onto libvpx deadline constants. */
    unsigned long ulDeadline = VPX_DL_GOOD_QUALITY;
    switch (deadline) {
        case 0: ulDeadline = VPX_DL_REALTIME; break;
        case 1: ulDeadline = VPX_DL_GOOD_QUALITY; break;
        case 2: ulDeadline = VPX_DL_BEST_QUALITY; break;
    }

    void *pixelBitmap;
    if (AndroidBitmap_lockPixels(env, bitmap, &pixelBitmap) >= 0) {
        /* Convert the RGB pixels into the encoder's I420 image in place. */
        RGBtoYUV420((unsigned char*) pixelBitmap, pHandle->img.planes[0], pHandle->cfgEnc.g_w, pHandle->cfgEnc.g_h, _PixelSize);

        int encodeResult = vpx_codec_encode(&pHandle->codec, &pHandle->img, frame_cnt, 1, flags, ulDeadline);

        AndroidBitmap_unlockPixels(env, bitmap);

        if (encodeResult) goto EXIT;
    }

    /* Drain every compressed packet, writing each as [size][payload]. */
    const vpx_codec_cx_pkt_t *pPacket;
    vpx_codec_iter_t iter = NULL;
    unsigned char *pFrame = (unsigned char *) pByteBuffer;
    int *pFrameSize;

    while ( (pPacket = (vpx_codec_get_cx_data(&pHandle->codec, &iter))) ) {
        /* Stop before overflowing the caller-supplied buffer. */
        if ((packet_size + sizeof(int) + pPacket->data.frame.sz) >= bufferSize) goto EXIT;

        switch (pPacket->kind) {
            case VPX_CODEC_CX_FRAME_PKT:
            {
                /* Length prefix, then the raw packet bytes. */
                pFrameSize = (int *) pFrame;
                *pFrameSize = pPacket->data.frame.sz;
                pFrame = pFrame + sizeof(int);

                memcpy(pFrame, pPacket->data.frame.buf, pPacket->data.frame.sz);
                pFrame = pFrame + pPacket->data.frame.sz;

                packet_size = packet_size + sizeof(int) + pPacket->data.frame.sz;
            }
            break;

            default:
            break;
        }
    }

EXIT:
    /* Mode 0 = copy contents back to the Java array and free the pin. */
    (*env)->ReleaseByteArrayElements(env, buffer, pByteBuffer, 0);

    return packet_size;
}
/* Encode one frame (img == NULL flushes), write each compressed packet, and
 * optionally round-trip it through a decoder so encoder/decoder output can be
 * compared by testing_decode(). Returns 1 if any packet was produced. */
static int encode_frame(vpx_codec_ctx_t *ecodec, vpx_codec_enc_cfg_t *cfg,
                        vpx_image_t *img, unsigned int frame_in,
                        VpxVideoWriter *writer, int test_decode,
                        vpx_codec_ctx_t *dcodec, unsigned int *frame_out,
                        int *mismatch_seen) {
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *cx_pkt;
  int produced = 0;
  int have_frame_data = 0;

  if (vpx_codec_encode(ecodec, img, frame_in, 1, 0,
                       VPX_DL_GOOD_QUALITY) != VPX_CODEC_OK)
    die_codec(ecodec, "Failed to encode frame");

  while ((cx_pkt = vpx_codec_get_cx_data(ecodec, &iter)) != NULL) {
    produced = 1;
    if (cx_pkt->kind != VPX_CODEC_CX_FRAME_PKT) continue;

    /* Count only complete frames; fragments don't bump the counter. */
    if (!(cx_pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT)) *frame_out += 1;

    if (!vpx_video_writer_write_frame(writer, cx_pkt->data.frame.buf,
                                      cx_pkt->data.frame.sz,
                                      cx_pkt->data.frame.pts)) {
      die_codec(ecodec, "Failed to write compressed frame");
    }
    printf((cx_pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0 ? "K" : ".");
    fflush(stdout);
    have_frame_data = 1;

    /* Feed the same packet to the decoder for the mismatch check below. */
    if (test_decode) {
      if (vpx_codec_decode(dcodec, cx_pkt->data.frame.buf,
                           (unsigned int)cx_pkt->data.frame.sz, NULL, 0))
        die_codec(dcodec, "Failed to decode frame.");
    }
  }

  /* Compare the encoder's reconstruction against the decoder's output. */
  if (have_frame_data && test_decode)
    testing_decode(ecodec, dcodec, cfg, *frame_out, mismatch_seen);

  return produced;
}
int vp8Encoder::encode(int buffer_time, uint8_t* video_data) { struct timeval start; struct timeval end; gettimeofday(&start, NULL); uint8_t *src = video_data; int yPlaneSize = raw.d_w * raw.d_h; memcpy(raw.planes[VPX_PLANE_Y], src, yPlaneSize); unsigned char *inPtr = src + yPlaneSize; unsigned char *cbPtr = raw.planes[VPX_PLANE_U]; unsigned char *crPtr = raw.planes[VPX_PLANE_V]; for(int i = 0; i < raw.d_h/2; i++) { for(int j = 0; j < raw.d_w/2; j++) { cbPtr[j] = inPtr[0]; crPtr[j] = inPtr[1]; inPtr+=2; } cbPtr += raw.stride[VPX_PLANE_U]; crPtr += raw.stride[VPX_PLANE_V]; } const vpx_codec_cx_pkt_t *pkt; vpx_codec_iter_t iter = NULL; int flags = 0; if(!(frame_cnt & 3)) // flags |= VPX_EFLAG_FORCE_KF; // else // flags &= ~VPX_EFLAG_FORCE_KF; //pthread_mutex_lock (&mCodecLock); LOGD("Start to encoding ..."); vpx_codec_encode(&encoder, &raw, frame_cnt++, 1, flags, 20000);//VPX_DL_REALTIME); while ((pkt = vpx_codec_get_cx_data(&encoder, &iter))) { if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) { gettimeofday(&end, NULL); long time_use = (end.tv_sec-start.tv_sec)*1000000+(end.tv_usec-start.tv_usec); LOGD("vp8encode consume=%.2fms.", (double)time_use/1000.0); mOutput->appendData((uint8_t*)pkt->data.frame.buf, pkt->data.frame.sz); } } //pthread_mutex_unlock (&mCodecLock); }
/* Encode one input video frame: converts the sink buffer to a vpx_image,
 * attaches it to the frame via a coder hook, forces a keyframe when
 * requested, and dispatches resulting packets via gst_vp8_enc_process().
 * Returns a GstFlowReturn (GST_FLOW_ERROR on encoder failure). */
static GstFlowReturn
gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstVP8Enc *encoder;
  const GstVideoState *state;
  vpx_codec_err_t status;
  int flags = 0;
  vpx_image_t *image;
  GstVP8EncCoderHook *hook;
  int quality;

  GST_DEBUG_OBJECT (base_video_encoder, "handle_frame");

  encoder = GST_VP8_ENC (base_video_encoder);

  state = gst_base_video_encoder_get_state (base_video_encoder);
  encoder->n_frames++;

  GST_DEBUG_OBJECT (base_video_encoder, "size %d %d", state->width,
      state->height);

  image = gst_vp8_enc_buffer_to_image (encoder, frame->sink_buffer);

  /* The hook carries the wrapped image (and any invisible frames) with the
   * GstVideoFrame until the packet for it is produced. */
  hook = g_slice_new0 (GstVP8EncCoderHook);
  hook->image = image;
  frame->coder_hook = hook;
  frame->coder_hook_destroy_notify =
      (GDestroyNotify) gst_vp8_enc_coder_hook_free;

  if (frame->force_keyframe) {
    flags |= VPX_EFLAG_FORCE_KF;
  }

  /* speed == 0 selects the slow/best deadline, otherwise good quality. */
  quality = (encoder->speed == 0) ? VPX_DL_BEST_QUALITY : VPX_DL_GOOD_QUALITY;

  status = vpx_codec_encode (&encoder->encoder, image,
      encoder->n_frames, 1, flags, quality);
  if (status != 0) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE,
        ("Failed to encode frame"), ("%s", gst_vpx_error_name (status)));
    g_slice_free (GstVP8EncCoderHook, hook);
    frame->coder_hook = NULL;
    g_slice_free (vpx_image_t, image);
    /* BUGFIX: this function returns GstFlowReturn; the original returned
     * FALSE, which is numerically GST_FLOW_OK and silently swallowed encoder
     * errors. */
    return GST_FLOW_ERROR;
  }

  return gst_vp8_enc_process (encoder);
}
/* Begin encoding one raw frame: wires the caller's planar buffer into the
 * encoder's vpx_image (stride/offset info comes from the pre-computed
 * enc_vafp layout), runs vpx_codec_encode() in realtime mode, then delegates
 * packet extraction to pj_vpx_codec_encode_more(). */
static pj_status_t pj_vpx_codec_encode_begin(pjmedia_vid_codec *codec,
        const pjmedia_vid_encode_opt *opt, const pjmedia_frame *input,
        unsigned out_size, pjmedia_frame *output, pj_bool_t *has_more) {
    vpx_private *vpx = (vpx_private*) codec->codec_data;
    pj_status_t status; /* NOTE(review): declared but never used */
    vpx_image_t *rawimg;
    vpx_enc_frame_flags_t flags = 0;
    pj_uint8_t *p;
    int i, res;

    PJ_ASSERT_RETURN(codec && input, PJ_EINVAL);

    p = (pj_uint8_t*) input->buf;
    *has_more = PJ_FALSE;

    rawimg = &vpx->rawimg;
    // TODO : make sure it's I420 ?
    /* Reject input smaller than one full raw picture. */
    if(input->size < vpx->enc_vafp.framebytes){
        PJ_LOG(1, (THIS_FILE, "Frame provided is too small !"));
        return PJ_ETOOSMALL;
    }
    /* Point each plane at its slice of the caller's contiguous buffer. */
    for (i = 0; i < vpx->enc_vfi->plane_cnt; ++i) {
        rawimg->planes[i] = p;
        rawimg->stride[i] = vpx->enc_vafp.strides[i];
        p += vpx->enc_vafp.plane_bytes[i];
    }

    if (opt && opt->force_keyframe) {
        flags |= VPX_EFLAG_FORCE_KF;
    }

    res = vpx_codec_encode(&vpx->encoder, rawimg, input->timestamp.u64, 1, flags, VPX_DL_REALTIME);

    if (res != VPX_CODEC_OK) {
        PJ_LOG(1, (THIS_FILE, "Failed to encode : %s %s", vpx_codec_err_to_string(res), vpx->encoder.err_detail));
        return PJMEDIA_CODEC_EFAILED;
    }

    /* Reset the packet-iteration state that encode_more() consumes. */
    vpx->enc_iter = NULL;
    vpx->enc_frame_len = 0;
    vpx->enc_processed = 0;

    return pj_vpx_codec_encode_more(codec, out_size, output, has_more);
}
/* Encode the encoder's current image and return the first compressed frame.
 * On success *packet points at encoder-owned bytes and *keyframe is set;
 * returns the frame size. Returns 0 when no frame packet was produced. */
int krad_vpx_encoder_write (krad_vpx_encoder_t *kradvpx, unsigned char **packet, int *keyframe) {
  const vpx_codec_cx_pkt_t *cx_pkt;

  /* Apply any deferred configuration change before encoding. */
  if (kradvpx->update_config == 1) {
    krad_vpx_encoder_config_set (kradvpx, &kradvpx->cfg);
    kradvpx->update_config = 0;
    //krad_vpx_encoder_print_config (kradvpx);
  }

#ifdef BENCHMARK
  krad_vpx_benchmark_start(kradvpx);
#endif

  if (vpx_codec_encode (&kradvpx->encoder, kradvpx->image, kradvpx->frames,
                        1, kradvpx->flags, kradvpx->deadline)) {
    krad_vpx_fail (&kradvpx->encoder, "Failed to encode frame");
  }

  kradvpx->frames++;
  kradvpx->flags = 0;
  kradvpx->iter = NULL;

  while ((cx_pkt = vpx_codec_get_cx_data (&kradvpx->encoder,
                                          &kradvpx->iter)) != NULL) {
    kradvpx->pkt = cx_pkt;
    if (cx_pkt->kind != VPX_CODEC_CX_FRAME_PKT) {
      continue;
    }

    *packet = cx_pkt->data.frame.buf;
    *keyframe = cx_pkt->data.frame.flags & VPX_FRAME_IS_KEY;
    if (*keyframe != 0) {
      kradvpx->frames_since_keyframe = 0;
    } else {
      kradvpx->frames_since_keyframe++;
    }

#ifdef BENCHMARK
    krad_vpx_benchmark_finish(kradvpx);
#endif

    return cx_pkt->data.frame.sz;
  }

  return 0;
}
/* Encode the encoder's current image and hand back the first compressed
 * frame: *packet aliases encoder-owned memory, *keyframe is set from the
 * frame flags, and the running byte counter is updated. Returns the frame
 * size, or 0 when the encoder produced no frame packet. */
int krad_vpx_encoder_write (krad_vpx_encoder_t *vpx, unsigned char **packet, int *keyframe) {
  /* Pick up any pending reconfiguration first. */
  if (vpx->update_config == 1) {
    krad_vpx_encoder_config_set (vpx, &vpx->cfg);
    vpx->update_config = 0;
    krad_vpx_encoder_print_config (vpx);
  }

  if (vpx_codec_encode (&vpx->encoder, vpx->image, vpx->frames, 1,
                        vpx->flags, vpx->deadline)) {
    krad_vpx_fail (&vpx->encoder, "Failed to encode frame");
  }

  vpx->frames++;
  vpx->flags = 0;
  vpx->iter = NULL;

  for (;;) {
    vpx->pkt = vpx_codec_get_cx_data (&vpx->encoder, &vpx->iter);
    if (vpx->pkt == NULL) {
      return 0;
    }
    if (vpx->pkt->kind != VPX_CODEC_CX_FRAME_PKT) {
      continue;
    }

    *packet = vpx->pkt->data.frame.buf;
    *keyframe = vpx->pkt->data.frame.flags & VPX_FRAME_IS_KEY;
    if (*keyframe == 0) {
      vpx->frames_since_keyframe++;
    } else {
      vpx->frames_since_keyframe = 0;
    }

    vpx->bytes += vpx->pkt->data.frame.sz;
    return vpx->pkt->data.frame.sz;
  }
}
/* Flush the VP8 encoder at end-of-stream: a NULL image drains all delayed
 * frames, which are then dispatched downstream. In first-pass mode the
 * accumulated rate-control stats are written to the multipass cache file. */
static GstFlowReturn
gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{
  GstVP8Enc *encoder = GST_VP8_ENC (base_video_encoder);
  vpx_codec_err_t status;

  GST_DEBUG_OBJECT (base_video_encoder, "finish");

  /* NULL image tells libvpx to flush everything it has buffered. */
  status =
      vpx_codec_encode (&encoder->encoder, NULL, encoder->n_frames, 1, 0, 0);
  if (status != 0) {
    GST_ERROR_OBJECT (encoder, "encode returned %d %s", status,
        gst_vpx_error_name (status));
    return GST_FLOW_ERROR;
  }

  /* dispatch remaining frames */
  gst_vp8_enc_process (encoder);

  /* First pass: persist the stats buffer for the second encoding pass. */
  if (encoder->multipass_mode == VPX_RC_FIRST_PASS
      && encoder->multipass_cache_file) {
    GError *err = NULL;

    if (!g_file_set_contents (encoder->multipass_cache_file,
            (const gchar *) encoder->first_pass_cache_content->data,
            encoder->first_pass_cache_content->len, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, WRITE, (NULL),
          ("Failed to write multipass cache file: %s", err->message));
      g_error_free (err);
    }
  }

  return GST_FLOW_OK;
}
int compress_image(struct vpx_context *ctx, uint8_t *input[3], int input_stride[3], uint8_t **out, int *outsz) { struct vpx_image image; const vpx_codec_cx_pkt_t *pkt; vpx_codec_iter_t iter = NULL; int frame_cnt = 0; int flags = 0; int i = 0; /* Encoding */ memset(&image, 0, sizeof(struct vpx_image)); image.w = ctx->width; image.h = ctx->height; image.fmt = ctx->pixfmt; image.planes[0] = input[0]; image.planes[1] = input[1]; image.planes[2] = input[2]; image.stride[0] = input_stride[0]; image.stride[1] = input_stride[1]; image.stride[2] = input_stride[2]; image.d_w = ctx->width; image.d_h = ctx->height; image.x_chroma_shift = 0; image.y_chroma_shift = 0; image.bps = 8; i = vpx_codec_encode(&ctx->codec, &image, frame_cnt, 1, flags, VPX_DL_REALTIME); if (i) { codec_error(&ctx->codec, "vpx_codec_encode"); return i; } pkt = vpx_codec_get_cx_data(&ctx->codec, &iter); if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) { return 1; } *out = pkt->data.frame.buf; *outsz = pkt->data.frame.sz; return 0; }
/* Encode one image and append every resulting compressed frame to the WebM
 * output stream. Frame and timebase counters advance regardless of outcome.
 * Returns 0 on success, 10000 + frame count after an encoder failure. */
int write_webm_frame_data(encoding_context *context, vpx_image_t *image)
{
    int duration = next_frame_duration(context);
    vpx_codec_err_t err = vpx_codec_encode(&context->codec, image,
                                           context->timebase_unit_count,
                                           duration, 0, context->deadline);
    context->frame_count += 1;
    context->timebase_unit_count += duration;

    if (err != VPX_CODEC_OK) {
        cleanup(context);
        return 10000 + context->frame_count;
    }

    vpx_codec_iter_t iter = NULL;
    const vpx_codec_cx_pkt_t *pkt = NULL;
    while ((pkt = vpx_codec_get_cx_data(&context->codec, &iter)) != NULL) {
        /* Only compressed-frame packets are muxed; stats etc. are skipped. */
        if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
            write_webm_block(&context->ebml, &context->cfg, pkt);
        }
    }

    return 0;
}
/**
 * Encode a frame into multiple layers
 * Create a superframe containing the individual layers
 */
vpx_codec_err_t vpx_svc_encode(SvcContext *svc_ctx, vpx_codec_ctx_t *codec_ctx,
                               struct vpx_image *rawimg, vpx_codec_pts_t pts,
                               int64_t duration, int deadline) {
  vpx_codec_err_t res;
  vpx_codec_iter_t iter;
  const vpx_codec_cx_pkt_t *cx_pkt;
  SvcInternal_t *const si = get_svc_internal(svc_ctx);
  if (svc_ctx == NULL || codec_ctx == NULL || si == NULL) {
    return VPX_CODEC_INVALID_PARAM;
  }

  svc_log_reset(svc_ctx);

  res = vpx_codec_encode(codec_ctx, rawimg, pts, (uint32_t)duration, 0, deadline);
  if (res != VPX_CODEC_OK) {
    return res;
  }
  // save compressed data
  iter = NULL;
  while ((cx_pkt = vpx_codec_get_cx_data(codec_ctx, &iter))) {
    switch (cx_pkt->kind) {
#if CONFIG_SPATIAL_SVC && defined(VPX_TEST_SPATIAL_SVC)
      /* Per-spatial-layer PSNR packet: log and accumulate PSNR/SSE sums
       * for every layer and component. */
      case VPX_CODEC_SPATIAL_SVC_LAYER_PSNR: {
        int i;
        for (i = 0; i < svc_ctx->spatial_layers; ++i) {
          int j;
          svc_log(svc_ctx, SVC_LOG_DEBUG,
                  "SVC frame: %d, layer: %d, PSNR(Total/Y/U/V): "
                  "%2.3f %2.3f %2.3f %2.3f \n",
                  si->psnr_pkt_received, i,
                  cx_pkt->data.layer_psnr[i].psnr[0],
                  cx_pkt->data.layer_psnr[i].psnr[1],
                  cx_pkt->data.layer_psnr[i].psnr[2],
                  cx_pkt->data.layer_psnr[i].psnr[3]);
          svc_log(svc_ctx, SVC_LOG_DEBUG,
                  "SVC frame: %d, layer: %d, SSE(Total/Y/U/V): "
                  "%2.3f %2.3f %2.3f %2.3f \n",
                  si->psnr_pkt_received, i,
                  cx_pkt->data.layer_psnr[i].sse[0],
                  cx_pkt->data.layer_psnr[i].sse[1],
                  cx_pkt->data.layer_psnr[i].sse[2],
                  cx_pkt->data.layer_psnr[i].sse[3]);
          for (j = 0; j < COMPONENTS; ++j) {
            si->psnr_sum[i][j] += cx_pkt->data.layer_psnr[i].psnr[j];
            si->sse_sum[i][j] += cx_pkt->data.layer_psnr[i].sse[j];
          }
        }
        ++si->psnr_pkt_received;
        break;
      }
      /* Per-spatial-layer byte counts: accumulate per-layer totals. */
      case VPX_CODEC_SPATIAL_SVC_LAYER_SIZES: {
        int i;
        for (i = 0; i < svc_ctx->spatial_layers; ++i)
          si->bytes_sum[i] += cx_pkt->data.layer_sizes[i];
        break;
      }
#endif
      case VPX_CODEC_PSNR_PKT: {
#if CONFIG_SPATIAL_SVC && defined(VPX_TEST_SPATIAL_SVC)
        /* Single-layer PSNR packet: treated as layer 0. */
        int j;
        svc_log(svc_ctx, SVC_LOG_DEBUG,
                "frame: %d, layer: %d, PSNR(Total/Y/U/V): "
                "%2.3f %2.3f %2.3f %2.3f \n",
                si->psnr_pkt_received, 0,
                cx_pkt->data.layer_psnr[0].psnr[0],
                cx_pkt->data.layer_psnr[0].psnr[1],
                cx_pkt->data.layer_psnr[0].psnr[2],
                cx_pkt->data.layer_psnr[0].psnr[3]);
        for (j = 0; j < COMPONENTS; ++j) {
          si->psnr_sum[0][j] += cx_pkt->data.layer_psnr[0].psnr[j];
          si->sse_sum[0][j] += cx_pkt->data.layer_psnr[0].sse[j];
        }
#endif
      }
        /* Counted even when the SVC-specific logging above is compiled out. */
        ++si->psnr_pkt_received;
        break;
      default: {
        break;
      }
    }
  }

  return VPX_CODEC_OK;
}
/* Flush the VP8 encoder at end-of-stream and dispatch every remaining
 * packet: first-pass stats packets are cached (and an empty preroll buffer
 * is pushed for the matching input frame), compressed frames are copied into
 * GstBuffers and either finished immediately or, for invisible alt-ref
 * frames, queued on the frame's coder hook. In first-pass mode the collected
 * stats are finally written to the multipass cache file. */
static gboolean
gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{
  GstVP8Enc *encoder;
  GstVideoFrame *frame;
  int flags = 0;
  vpx_codec_err_t status;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt;

  GST_DEBUG_OBJECT (base_video_encoder, "finish");

  encoder = GST_VP8_ENC (base_video_encoder);

  /* A NULL image tells libvpx to flush all delayed/lagged frames. */
  status =
      vpx_codec_encode (&encoder->encoder, NULL, encoder->n_frames, 1, flags,
      0);
  if (status != 0) {
    GST_ERROR_OBJECT (encoder, "encode returned %d %s", status,
        gst_vpx_error_name (status));
    return FALSE;
  }

  pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  while (pkt != NULL) {
    GstBuffer *buffer;
    GstVP8EncCoderHook *hook;
    gboolean invisible, keyframe;

    GST_DEBUG_OBJECT (encoder, "packet %u type %d", (guint) pkt->data.frame.sz,
        pkt->kind);

    if (pkt->kind == VPX_CODEC_STATS_PKT
        && encoder->multipass_mode == VPX_RC_FIRST_PASS) {
      /* First-pass stats: append to the in-memory cache, then complete the
       * oldest pending frame with an empty preroll buffer. */
      GST_LOG_OBJECT (encoder, "handling STATS packet");
      g_byte_array_append (encoder->first_pass_cache_content,
          pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);
      frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
      if (frame != NULL) {
        buffer = gst_buffer_new ();
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
        frame->src_buffer = buffer;
        gst_base_video_encoder_finish_frame (base_video_encoder, frame);
      }
      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    } else if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) {
      GST_LOG_OBJECT (encoder, "non frame pkt: %d", pkt->kind);
      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    }

    invisible = (pkt->data.frame.flags & VPX_FRAME_IS_INVISIBLE) != 0;
    keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
    frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
    g_assert (frame != NULL);
    hook = frame->coder_hook;

    /* Copy the compressed frame out of libvpx-owned memory. */
    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);
    frame->is_sync_point = frame->is_sync_point || keyframe;

    /* The wrapped raw image attached in handle_frame is no longer needed. */
    if (hook->image)
      g_slice_free (vpx_image_t, hook->image);
    hook->image = NULL;

    if (invisible) {
      /* Invisible (alt-ref) frames are queued and sent with a later frame. */
      hook->invisible = g_list_append (hook->invisible, buffer);
    } else {
      frame->src_buffer = buffer;
      gst_base_video_encoder_finish_frame (base_video_encoder, frame);
      frame = NULL;
    }

    pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  }

  if (encoder->multipass_mode == VPX_RC_FIRST_PASS
      && encoder->multipass_cache_file) {
    GError *err = NULL;

    /* Persist the first-pass stats for the second encoding pass. */
    if (!g_file_set_contents (encoder->multipass_cache_file,
            (const gchar *) encoder->first_pass_cache_content->data,
            encoder->first_pass_cache_content->len, &err)) {
      GST_ELEMENT_ERROR (encoder, RESOURCE, WRITE, (NULL),
          ("Failed to write multipass cache file: %s", err->message));
      g_error_free (err);
    }
  }

  return TRUE;
}
/* WebMEnc entry point: encodes a numbered image sequence (loaded via DevIL)
 * into a VP8 .webm file. argv: input pattern, flip flag, thread count,
 * target bitrate (kbps), fps, output path. */
int main(int argc, char* argv[])
{
    if (argc != 7) {
        fprintf(stderr, " Usage: WebMEnc <input filename> <flip> <threads> <bit-rates> <frame-per-second> <output filename>\nExample: WebMEnc frame.%%.5d.tiff 1 8 512 30 frame.webm\n");
        return EXIT_FAILURE;
    }

    /* DevIL image library initialization. */
    ilInit();
    iluInit();

    // Initialize VPX codec.
    //
    vpx_codec_ctx_t vpxContext;
    vpx_codec_enc_cfg_t vpxConfig;
    if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &vpxConfig, 0) != VPX_CODEC_OK) {
        return EXIT_FAILURE;
    }

    // Try to load the first frame to initialize width and height.
    //
    int flip = (bool)atoi(argv[2]);
    vpx_image_t *rgbImage = NULL, *yv12Image = NULL;
    if (readImage(argv[1], 0, &rgbImage, &yv12Image, flip) == false) {
        return EXIT_FAILURE;
    }
    vpxConfig.g_h = yv12Image->h;
    vpxConfig.g_w = yv12Image->w;
    vpxConfig.g_threads = atoi(argv[3]);
    vpxConfig.rc_target_bitrate = atoi(argv[4]);
    /* Timebase 1/fps: one timebase unit per frame at the nominal rate. */
    vpxConfig.g_timebase.den = atoi(argv[5]);
    vpxConfig.g_timebase.num = 1;

    // Prepare the output .webm file.
    //
    EbmlGlobal ebml;
    memset(&ebml, 0, sizeof(EbmlGlobal));
    ebml.last_pts_ms = -1;
    ebml.stream = fopen(argv[6], "wb");
    if (ebml.stream == NULL) {
        return EXIT_FAILURE;
    }
    vpx_rational ebmlFPS = vpxConfig.g_timebase; /* NOTE(review): unused */
    struct vpx_rational arg_framerate = {atoi(argv[5]), 1};
    Ebml_WriteWebMFileHeader(&ebml, &vpxConfig, &arg_framerate);
    /* NOTE(review): duration is den/num * 1000 = ms per frame, but it is
     * passed to vpx_codec_encode() as a count of timebase (1/fps) units --
     * verify the intended pts scale. */
    unsigned long duration = (float)arg_framerate.den / (float)arg_framerate.num * 1000;

    if (vpx_codec_enc_init(&vpxContext, vpx_codec_vp8_cx(), &vpxConfig, 0) != VPX_CODEC_OK) {
        return EXIT_FAILURE;
    }

    // fprintf(stdout, "input=%s\nflip=%s\nthreads=%s\nbps=%s\nfps=%s\noutput=%s\n", argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

    // Reading image file sequence, encoding to .WebM file.
    //
    int frameNumber = 0;
    while(readImage(argv[1], frameNumber, &rgbImage, &yv12Image, flip)) {
        /* deadline 0: libvpx treats this as best-quality -- TODO confirm. */
        vpx_codec_err_t vpxError = vpx_codec_encode(&vpxContext, yv12Image, frameNumber, duration, 0, 0);
        if (vpxError != VPX_CODEC_OK) {
            return EXIT_FAILURE;
        }

        /* Mux every packet produced for this frame. */
        vpx_codec_iter_t iter = NULL;
        const vpx_codec_cx_pkt_t *packet;
        while( (packet = vpx_codec_get_cx_data(&vpxContext, &iter)) ) {
            Ebml_WriteWebMBlock(&ebml, &vpxConfig, packet);
        }

        frameNumber ++;
        printf("Processed %d frames.\r", frameNumber);

        vpx_img_free(yv12Image);
        yv12Image = NULL;
    }

    Ebml_WriteWebMFileFooter(&ebml, 0);
    fclose(ebml.stream);

    vpx_codec_destroy(&vpxContext);

    return EXIT_SUCCESS;
}
/* Encode one raw I420 medium (or flush the encoder when medium == NULL) into
 * codeme. Returns 1 when a compressed frame was produced and copied out,
 * otherwise the vpx_codec_encode() return value (0 on clean no-output).
 * NOTE(review): only ONE packet is drained per call; if libvpx ever emits
 * more than one packet per encode, the extras are silently dropped --
 * confirm this is intended. */
int32_t kr_vpx_encode (krad_vpx_encoder_t *vpx, kr_codeme_t *codeme, kr_medium_t *medium) {

  int ret;
  vpx_image_t image;
  memset (&image, 0, sizeof(vpx_image_t));
  vpx_codec_iter_t iter;
  //vpx_codec_err_t err;
  //vpx_codec_cx_pkt_t *pkt;

  ret = 0;

  /* Apply any deferred configuration change before encoding. */
  if (vpx->update_config == 1) {
    krad_vpx_encoder_config_set (vpx, &vpx->cfg);
    vpx->update_config = 0;
    krad_vpx_encoder_print_config (vpx);
  }

  if (medium == NULL) {
    /* Flush: a NULL image drains frames delayed inside the encoder. */
    ret = vpx_codec_encode (&vpx->encoder, NULL, vpx->frames, 1, vpx->flags, vpx->deadline);
  } else {
    vpx_img_wrap (&image, VPX_IMG_FMT_I420, vpx->width, vpx->height, 1, medium->data);
    //image.w = vpx->width;
    //image.h = vpx->height;
    //image.d_w = vpx->width;
    //image.d_h = vpx->height;
    //image.planes[0] = medium->v.ppx[0];
    //image.planes[1] = medium->v.ppx[1];
    //image.planes[2] = medium->v.ppx[2];
    /* Override the wrapped strides with the medium's actual plane strides. */
    image.stride[0] = medium->v.pps[0];
    image.stride[1] = medium->v.pps[1];
    image.stride[2] = medium->v.pps[2];
    ret = vpx_codec_encode (&vpx->encoder, &image, vpx->frames, 1, vpx->flags, vpx->deadline);
  }

  if (ret != 0) {
    printke ("oh shit");
  }

  vpx->frames++;
  vpx->flags = 0;

  iter = NULL;
  vpx->pkt = vpx_codec_get_cx_data (&vpx->encoder, &iter);
  if (vpx->pkt != NULL) {
    if (vpx->pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
      /* NOTE(review): assumes codeme->data is large enough for the packet --
       * there is no bound check before this memcpy. */
      codeme->sz = vpx->pkt->data.frame.sz;
      memcpy (codeme->data, vpx->pkt->data.frame.buf, codeme->sz);
      codeme->key = vpx->pkt->data.frame.flags & VPX_FRAME_IS_KEY;
      if (codeme->key == 0) {
        vpx->frames_since_keyframe++;
      } else {
        vpx->frames_since_keyframe = 0;
      }
      return 1;
    }
  }

  return ret;
}
int main(int argc, char **argv) { FILE *infile, *outfile[MAX_LAYERS]; vpx_codec_ctx_t codec; vpx_codec_enc_cfg_t cfg; int frame_cnt = 0; vpx_image_t raw; vpx_codec_err_t res; unsigned int width; unsigned int height; int frame_avail; int got_data; int flags = 0; int i; int pts = 0; // PTS starts at 0 int frame_duration = 1; // 1 timebase tick per frame int layering_mode = 0; int frames_in_layer[MAX_LAYERS] = {0}; int layer_flags[MAX_PERIODICITY] = {0}; // Check usage and arguments if (argc < 9) die("Usage: %s <infile> <outfile> <width> <height> <rate_num> " " <rate_den> <mode> <Rate_0> ... <Rate_nlayers-1>\n", argv[0]); width = strtol (argv[3], NULL, 0); height = strtol (argv[4], NULL, 0); if (width < 16 || width%2 || height <16 || height%2) die ("Invalid resolution: %d x %d", width, height); if (!sscanf(argv[7], "%d", &layering_mode)) die ("Invalid mode %s", argv[7]); if (layering_mode<0 || layering_mode>6) die ("Invalid mode (0..6) %s", argv[7]); if (argc != 8+mode_to_num_layers[layering_mode]) die ("Invalid number of arguments"); if (!vpx_img_alloc (&raw, VPX_IMG_FMT_I420, width, height, 1)) die ("Failed to allocate image", width, height); printf("Using %s\n",vpx_codec_iface_name(interface)); // Populate encoder configuration res = vpx_codec_enc_config_default(interface, &cfg, 0); if(res) { printf("Failed to get config: %s\n", vpx_codec_err_to_string(res)); return EXIT_FAILURE; } // Update the default configuration with our settings cfg.g_w = width; cfg.g_h = height; // Timebase format e.g. 
30fps: numerator=1, demoninator=30 if (!sscanf (argv[5], "%d", &cfg.g_timebase.num )) die ("Invalid timebase numerator %s", argv[5]); if (!sscanf (argv[6], "%d", &cfg.g_timebase.den )) die ("Invalid timebase denominator %s", argv[6]); for (i=8; i<8+mode_to_num_layers[layering_mode]; i++) if (!sscanf(argv[i], "%d", &cfg.ts_target_bitrate[i-8])) die ("Invalid data rate %s", argv[i]); // Real time parameters cfg.rc_dropframe_thresh = 0; cfg.rc_end_usage = VPX_CBR; cfg.rc_resize_allowed = 0; cfg.rc_min_quantizer = 4; cfg.rc_max_quantizer = 63; cfg.rc_undershoot_pct = 98; cfg.rc_overshoot_pct = 100; cfg.rc_buf_initial_sz = 500; cfg.rc_buf_optimal_sz = 600; cfg.rc_buf_sz = 1000; // Enable error resilient mode cfg.g_error_resilient = 1; cfg.g_lag_in_frames = 0; cfg.kf_mode = VPX_KF_DISABLED; // Disable automatic keyframe placement cfg.kf_min_dist = cfg.kf_max_dist = 1000; // Temporal scaling parameters: // NOTE: The 3 prediction frames cannot be used interchangeably due to // differences in the way they are handled throughout the code. The // frames should be allocated to layers in the order LAST, GF, ARF. // Other combinations work, but may produce slightly inferior results. 
switch (layering_mode) { case 0: { // 2-layers, 2-frame period int ids[2] = {0,1}; cfg.ts_number_layers = 2; cfg.ts_periodicity = 2; cfg.ts_rate_decimator[0] = 2; cfg.ts_rate_decimator[1] = 1; memcpy(cfg.ts_layer_id, ids, sizeof(ids)); #if 1 // 0=L, 1=GF, Intra-layer prediction enabled layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF; layer_flags[1] = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_REF_ARF; #else // 0=L, 1=GF, Intra-layer prediction disabled layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF; layer_flags[1] = VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_REF_LAST; #endif break; } case 1: { // 2-layers, 3-frame period int ids[3] = {0,1,1}; cfg.ts_number_layers = 2; cfg.ts_periodicity = 3; cfg.ts_rate_decimator[0] = 3; cfg.ts_rate_decimator[1] = 1; memcpy(cfg.ts_layer_id, ids, sizeof(ids)); // 0=L, 1=GF, Intra-layer prediction enabled layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF; layer_flags[1] = layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST; break; } case 2: { // 3-layers, 6-frame period int ids[6] = {0,2,2,1,2,2}; cfg.ts_number_layers = 3; cfg.ts_periodicity = 6; cfg.ts_rate_decimator[0] = 6; cfg.ts_rate_decimator[1] = 3; cfg.ts_rate_decimator[2] = 1; memcpy(cfg.ts_layer_id, ids, sizeof(ids)); // 0=L, 1=GF, 2=ARF, Intra-layer prediction enabled layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF; layer_flags[3] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST; layer_flags[1] = layer_flags[2] = layer_flags[4] = layer_flags[5] = VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_LAST; break; } case 3: { // 3-layers, 4-frame period 
int ids[4] = {0,2,1,2}; cfg.ts_number_layers = 3; cfg.ts_periodicity = 4; cfg.ts_rate_decimator[0] = 4; cfg.ts_rate_decimator[1] = 2; cfg.ts_rate_decimator[2] = 1; memcpy(cfg.ts_layer_id, ids, sizeof(ids)); // 0=L, 1=GF, 2=ARF, Intra-layer prediction disabled layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF; layer_flags[2] = VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_LAST; layer_flags[1] = layer_flags[3] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF; break; } case 4: { // 3-layers, 4-frame period int ids[4] = {0,2,1,2}; cfg.ts_number_layers = 3; cfg.ts_periodicity = 4; cfg.ts_rate_decimator[0] = 4; cfg.ts_rate_decimator[1] = 2; cfg.ts_rate_decimator[2] = 1; memcpy(cfg.ts_layer_id, ids, sizeof(ids)); // 0=L, 1=GF, 2=ARF, Intra-layer prediction enabled in layer 1, // disabled in layer 2 layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF; layer_flags[2] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF; layer_flags[1] = layer_flags[3] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF; break; } case 5: { // 3-layers, 4-frame period int ids[4] = {0,2,1,2}; cfg.ts_number_layers = 3; cfg.ts_periodicity = 4; cfg.ts_rate_decimator[0] = 4; cfg.ts_rate_decimator[1] = 2; cfg.ts_rate_decimator[2] = 1; memcpy(cfg.ts_layer_id, ids, sizeof(ids)); // 0=L, 1=GF, 2=ARF, Intra-layer prediction enabled layer_flags[0] = VPX_EFLAG_FORCE_KF | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF; layer_flags[2] = VP8_EFLAG_NO_REF_ARF | VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_ARF; layer_flags[1] = layer_flags[3] = VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF; break; } case 6: { // NOTE: Probably of academic interest only // 5-layers, 16-frame period int 
ids[16] = {0,4,3,4,2,4,3,4,1,4,3,4,2,4,3,4}; cfg.ts_number_layers = 5; cfg.ts_periodicity = 16; cfg.ts_rate_decimator[0] = 16; cfg.ts_rate_decimator[1] = 8; cfg.ts_rate_decimator[2] = 4; cfg.ts_rate_decimator[3] = 2; cfg.ts_rate_decimator[4] = 1; memcpy(cfg.ts_layer_id, ids, sizeof(ids)); layer_flags[0] = VPX_EFLAG_FORCE_KF; layer_flags[1] = layer_flags[3] = layer_flags[5] = layer_flags[7] = layer_flags[9] = layer_flags[11] = layer_flags[13] = layer_flags[15] = VP8_EFLAG_NO_UPD_LAST | VP8_EFLAG_NO_UPD_GF | VP8_EFLAG_NO_UPD_ARF | VP8_EFLAG_NO_UPD_ENTROPY; layer_flags[2] = layer_flags[6] = layer_flags[10] = layer_flags[14] = 0; layer_flags[4] = layer_flags[12] = VP8_EFLAG_NO_REF_LAST; layer_flags[8] = VP8_EFLAG_NO_REF_LAST | VP8_EFLAG_NO_REF_GF | VP8_EFLAG_NO_UPD_ENTROPY; break; } default: break; } // Open input file if(!(infile = fopen(argv[1], "rb"))) die("Failed to open %s for reading", argv[1]); // Open an output file for each stream for (i=0; i<cfg.ts_number_layers; i++) { char file_name[512]; sprintf (file_name, "%s_%d.ivf", argv[2], i); if (!(outfile[i] = fopen(file_name, "wb"))) die("Failed to open %s for writing", file_name); write_ivf_file_header(outfile[i], &cfg, 0); } // Initialize codec if (vpx_codec_enc_init (&codec, interface, &cfg, 0)) die_codec (&codec, "Failed to initialize encoder"); // Cap CPU & first I-frame size vpx_codec_control (&codec, VP8E_SET_CPUUSED, -6); vpx_codec_control (&codec, VP8E_SET_MAX_INTRA_BITRATE_PCT, 600); frame_avail = 1; while (frame_avail || got_data) { vpx_codec_iter_t iter = NULL; const vpx_codec_cx_pkt_t *pkt; flags = layer_flags[frame_cnt % cfg.ts_periodicity]; frame_avail = read_frame(infile, &raw); if (vpx_codec_encode(&codec, frame_avail? 
&raw : NULL, pts, 1, flags, VPX_DL_REALTIME)) die_codec(&codec, "Failed to encode frame"); // Reset KF flag layer_flags[0] &= ~VPX_EFLAG_FORCE_KF; got_data = 0; while ( (pkt = vpx_codec_get_cx_data(&codec, &iter)) ) { got_data = 1; switch (pkt->kind) { case VPX_CODEC_CX_FRAME_PKT: for (i=cfg.ts_layer_id[frame_cnt % cfg.ts_periodicity]; i<cfg.ts_number_layers; i++) { write_ivf_frame_header(outfile[i], pkt); if (fwrite(pkt->data.frame.buf, 1, pkt->data.frame.sz, outfile[i])); frames_in_layer[i]++; } break; default: break; } printf (pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY)? "K":"."); fflush (stdout); } frame_cnt++; pts += frame_duration; } printf ("\n"); fclose (infile); printf ("Processed %d frames.\n",frame_cnt-1); if (vpx_codec_destroy(&codec)) die_codec (&codec, "Failed to destroy codec"); // Try to rewrite the output file headers with the actual frame count for (i=0; i<cfg.ts_number_layers; i++) { if (!fseek(outfile[i], 0, SEEK_SET)) write_ivf_file_header (outfile[i], &cfg, frames_in_layer[i]); fclose (outfile[i]); } return EXIT_SUCCESS; }
/* Encode the current raw video frame and mux it into the WebM segment,
 * first flushing any queued audio packets whose timestamps precede the
 * video frame (mkvmuxer requires monotonically increasing timestamps).
 *
 * Timestamps: videoTime/frameDuration are in encoder-timebase ticks,
 * derived from curFrame and frameRateDen; videoTimeNS converts to
 * nanoseconds for the muxer. */
void WebMVideoProcessor::operator()() {
  //cout << "Started encoding video frame\n";
  long long videoTime =
      (encoder->cfg.g_timebase.den * curFrame) / encoder->frameRateDen;
  long long frameDuration =
      ((encoder->cfg.g_timebase.den * (curFrame + 1)) / encoder->frameRateDen) -
      videoTime;
  //cout << "Encoding video frame at time " << double(videoTime)/30.0 << endl;
  /* Deadline 15000us per frame; errors are logged but not propagated. */
  int res = vpx_codec_encode(&(encoder->encoder), &(encoder->raw), videoTime,
                             frameDuration, 0, 15000);
  if (res != VPX_CODEC_OK) {
    cout << "ERROR IN ENCODING: " << res << endl;
  }
  //cout << "Finished encoding video frame\n";
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt;
  while ((pkt = vpx_codec_get_cx_data(&(encoder->encoder), &iter))) {
    switch (pkt->kind) {
      case VPX_CODEC_CX_FRAME_PKT: {
        long long videoTimeNS =
            ((1000000000ULL * videoTime) * encoder->cfg.g_timebase.num) /
            encoder->cfg.g_timebase.den;
        {
          /* Drain queued audio that must precede this video frame.
           * packetsToWrite is shared with the audio thread, hence the lock. */
          boost::mutex::scoped_lock scoped_lock(muxerMutex);
          while (packetsToWrite.size() > 0 &&
                 packetsToWrite.begin()->first < videoTimeNS) {
            if (!encoder->muxer_segment->AddFrame(
                    packetsToWrite.begin()->second->packet,
                    packetsToWrite.begin()->second->bytes,
                    2,                             //Audio track
                    packetsToWrite.begin()->first,
                    false                          // No audio keyframes
                    )) {
              printf("\n Could not add audio frame.\n");
            }
            /* Queue owns the packet buffer and wrapper; release both. */
            free(packetsToWrite.begin()->second->packet);
            delete packetsToWrite.begin()->second;
            packetsToWrite.erase(packetsToWrite.begin());
          }
        }
        //printf("curframe: %d %lld %d\n",curFrame, videoTimeNS/1000000, (curFrame % encoder->frameRateDen == 0));
        //printf("PACKET SIZE: %lu\n",pkt->data.frame.sz);
        if (!encoder->muxer_segment->AddFrame(
                (mkvmuxer::uint8 *)pkt->data.frame.buf,
                pkt->data.frame.sz,
                1,            //Video track
                videoTimeNS,  //timestamp
                curFrame % encoder->frameRateDen == 0  //1 keyframe per sec
                )) {
          printf("\n Could not add frame.\n");
        }
        curFrame++;
        break;
      }
      default:
        break;
    }
  }
  //cout << "Finished muxing video frame\n";
}
/* MSFilter process callback for the VP8 encoder: encode the most recent
 * queued picture, update the golden/altref/last reference-frame state
 * according to the flags actually used, split the bitstream into RTP
 * payload-descriptor packets and hand them to the packer.
 *
 * Timestamp is the 90 kHz RTP clock derived from the ticker time.
 * All state access is guarded by ms_filter_lock(). */
static void enc_process(MSFilter *f) {
  mblk_t *im;
  uint64_t timems = f->ticker->time;
  uint32_t timestamp = (uint32_t)(timems * 90); /* ms -> 90 kHz RTP units */
  EncState *s = (EncState *)f->data;
  unsigned int flags = 0;
  vpx_codec_err_t err;
  MSPicture yuv;
  bool_t is_ref_frame = FALSE;
  ms_filter_lock(f);
#ifdef AVPF_DEBUG
  ms_message("VP8 enc_process:");
#endif
  if (!s->ready) {
    /* Encoder not configured yet: drop everything queued. */
    ms_queue_flush(f->inputs[0]);
    ms_filter_unlock(f);
    return;
  }
  /* Only the most recent picture is encoded; older ones are flushed at
   * the end of this function. */
  if ((im = ms_queue_peek_last(f->inputs[0])) != NULL) {
    vpx_image_t img;
    flags = 0;
    ms_yuv_buf_init_from_mblk(&yuv, im);
    /* Wrap the mblk's I420 planes without copying. */
    vpx_img_wrap(&img, VPX_IMG_FMT_I420, s->vconf.vsize.width,
                 s->vconf.vsize.height, 1, yuv.planes[0]);
    /* Without AVPF, periodic keyframes come from the video starter. */
    if ((s->avpf_enabled != TRUE) &&
        ms_video_starter_need_i_frame(&s->starter, f->ticker->time)) {
      s->force_keyframe = TRUE;
    }
    if (s->force_keyframe == TRUE) {
      ms_message("Forcing vp8 key frame for filter [%p]", f);
      flags = VPX_EFLAG_FORCE_KF;
    } else if (s->avpf_enabled == TRUE) {
      if (s->frame_count == 0) s->force_keyframe = TRUE;
      /* Let the AVPF state machine pick reference/update flags. */
      enc_fill_encoder_flags(s, &flags);
    }
#ifdef AVPF_DEBUG
    ms_message("VP8 encoder frames state:");
    ms_message("\tgolden: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
               s->frames_state.golden.count, s->frames_state.golden.picture_id,
               (s->frames_state.golden.acknowledged == TRUE) ? "Y" : "N");
    ms_message("\taltref: count=%" PRIi64 ", picture_id=0x%04x, ack=%s",
               s->frames_state.altref.count, s->frames_state.altref.picture_id,
               (s->frames_state.altref.acknowledged == TRUE) ? "Y" : "N");
#endif
    err = vpx_codec_encode(&s->codec, &img, s->frame_count, 1, flags,
                           1000000LL / (2 * (int)s->vconf.fps)); /*encoder has half a framerate interval to encode*/
    if (err) {
      ms_error("vpx_codec_encode failed : %d %s (%s)\n", err,
               vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));
    } else {
      vpx_codec_iter_t iter = NULL;
      const vpx_codec_cx_pkt_t *pkt;
      MSList *list = NULL;

      /* Update the frames state. */
      is_ref_frame = FALSE;
      if (flags & VPX_EFLAG_FORCE_KF) {
        /* A keyframe refreshes every reference and is independent. */
        enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
        enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
        s->frames_state.golden.is_independant = TRUE;
        s->frames_state.altref.is_independant = TRUE;
        s->frames_state.last_independent_frame = s->frame_count;
        s->force_keyframe = FALSE;
        is_ref_frame = TRUE;
      } else if (flags & VP8_EFLAG_FORCE_GF) {
        enc_mark_reference_frame_as_sent(s, VP8_GOLD_FRAME);
        is_ref_frame = TRUE;
      } else if (flags & VP8_EFLAG_FORCE_ARF) {
        enc_mark_reference_frame_as_sent(s, VP8_ALTR_FRAME);
        is_ref_frame = TRUE;
      } else if (flags & VP8_EFLAG_NO_REF_LAST) {
        enc_mark_reference_frame_as_sent(s, VP8_LAST_FRAME);
        is_ref_frame = is_reconstruction_frame_sane(s, flags);
      }
      if (is_frame_independent(flags)) {
        s->frames_state.last_independent_frame = s->frame_count;
      }

      /* Pack the encoded frame. */
      while ((pkt = vpx_codec_get_cx_data(&s->codec, &iter))) {
        if ((pkt->kind == VPX_CODEC_CX_FRAME_PKT) && (pkt->data.frame.sz > 0)) {
          /* One RTP-format packet per libvpx partition/packet. */
          Vp8RtpFmtPacket *packet = ms_new0(Vp8RtpFmtPacket, 1);
          packet->m = allocb(pkt->data.frame.sz, 0);
          memcpy(packet->m->b_wptr, pkt->data.frame.buf, pkt->data.frame.sz);
          packet->m->b_wptr += pkt->data.frame.sz;
          mblk_set_timestamp_info(packet->m, timestamp);
          packet->pd = ms_new0(Vp8RtpFmtPayloadDescriptor, 1);
          packet->pd->start_of_partition = TRUE;
          packet->pd->non_reference_frame = s->avpf_enabled && !is_ref_frame;
          if (s->avpf_enabled == TRUE) {
            packet->pd->extended_control_bits_present = TRUE;
            packet->pd->pictureid_present = TRUE;
            packet->pd->pictureid = s->picture_id;
          } else {
            packet->pd->extended_control_bits_present = FALSE;
            packet->pd->pictureid_present = FALSE;
          }
          if (s->flags & VPX_CODEC_USE_OUTPUT_PARTITION) {
            packet->pd->pid = (uint8_t)pkt->data.frame.partition_id;
            /* Marker on the last fragment of the frame only. */
            if (!(pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT)) {
              mblk_set_marker_info(packet->m, TRUE);
            }
          } else {
            packet->pd->pid = 0;
            mblk_set_marker_info(packet->m, TRUE);
          }
          list = ms_list_append(list, packet);
        }
      }
#ifdef AVPF_DEBUG
      ms_message("VP8 encoder picture_id=%i ***| %s | %s | %s | %s",
                 (int)s->picture_id,
                 (flags & VPX_EFLAG_FORCE_KF)    ? "KF "
                 : (flags & VP8_EFLAG_FORCE_GF)  ? "GF "
                 : (flags & VP8_EFLAG_FORCE_ARF) ? "ARF" : " ",
                 (flags & VP8_EFLAG_NO_REF_GF) ? "NOREFGF" : " ",
                 (flags & VP8_EFLAG_NO_REF_ARF) ? "NOREFARF" : " ",
                 (flags & VP8_EFLAG_NO_REF_LAST) ? "NOREFLAST" : " ");
#endif
      vp8rtpfmt_packer_process(&s->packer, list, f->outputs[0], f->factory);

      /* Handle video starter if AVPF is not enabled. */
      s->frame_count++;
      if ((s->avpf_enabled != TRUE) && (s->frame_count == 1)) {
        ms_video_starter_first_frame(&s->starter, f->ticker->time);
      }

      /* Increment the pictureID. */
      s->picture_id++;
#ifdef PICTURE_ID_ON_16_BITS
      if (s->picture_id == 0) s->picture_id = 0x8000; /* wrap keeps MSB set */
#else
      if (s->picture_id == 0x0080) s->picture_id = 0; /* 7-bit wrap */
#endif
    }
  }
  ms_filter_unlock(f);
  /* Discard all queued pictures, including the one just encoded. */
  ms_queue_flush(f->inputs[0]);
}
int main(int argc, char **argv) { FILE *infile, *outfile; vpx_codec_ctx_t codec; vpx_codec_enc_cfg_t cfg; int frame_cnt = 0; vpx_image_t raw; vpx_codec_err_t res; long width; long height; int frame_avail; int got_data; int flags = 0; /* Open files */ if(argc!=5) die("Usage: %s <width> <height> <infile> <outfile>\n", argv[0]); width = strtol(argv[1], NULL, 0); height = strtol(argv[2], NULL, 0); if(width < 16 || width%2 || height <16 || height%2) die("Invalid resolution: %ldx%ld", width, height); if(!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, width, height, 1)) die("Faile to allocate image", width, height); if(!(outfile = fopen(argv[4], "wb"))) die("Failed to open %s for writing", argv[4]); printf("Using %s\n",vpx_codec_iface_name(interface)); /* Populate encoder configuration */ // res = vpx_codec_enc_config_default(interface, &cfg, 0); // if(res) { // printf("Failed to get config: %s\n", vpx_codec_err_to_string(res)); // return EXIT_FAILURE; // } // /* Update the default configuration with our settings */ // cfg.rc_target_bitrate = width * height * cfg.rc_target_bitrate // / cfg.g_w / cfg.g_h; // cfg.g_w = width; // cfg.g_h = height; // write_ivf_file_header(outfile, &cfg, 0); /* Open input file for this encoding pass */ if(!(infile = fopen(argv[3], "rb"))) die("Failed to open %s for reading", argv[3]); /* Initialize codec */ // if(vpx_codec_enc_init(&codec, interface, &cfg, 0)) // die_codec(&codec, "Failed to initialize encoder"); // frame_avail = 1; got_data = 0; while(frame_avail || got_data) { vpx_codec_iter_t iter = NULL; const vpx_codec_cx_pkt_t *pkt; frame_avail = read_frame(infile, &raw); // if(vpx_codec_encode(&codec, frame_avail? 
&raw : NULL, frame_cnt, // 1, flags, VPX_DL_REALTIME)) // die_codec(&codec, "Failed to encode frame"); // got_data = 0; while( (pkt = vpx_codec_get_cx_data(&codec, &iter)) ) { got_data = 1; switch(pkt->kind) { case VPX_CODEC_CX_FRAME_PKT: // write_ivf_frame_header(outfile, pkt); // if(fwrite(pkt->data.frame.buf, 1, pkt->data.frame.sz, // outfile)); // break; // default: break; } printf(pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY)? "K":"."); fflush(stdout); } frame_cnt++; } printf("\n"); fclose(infile); printf("Processed %d frames.\n",frame_cnt-1); if(vpx_codec_destroy(&codec)) // die_codec(&codec, "Failed to destroy codec"); // /* Try to rewrite the file header with the actual frame count */ if(!fseek(outfile, 0, SEEK_SET)) write_ivf_file_header(outfile, &cfg, frame_cnt-1); fclose(outfile); return EXIT_SUCCESS; }
/* GstBaseVideoEncoder handle_frame vfunc: lazily initializes the VP8
 * encoder from the negotiated video state on the first frame, encodes
 * `frame`, then drains all pending libvpx packets — stats packets (first
 * pass) are appended to the cache, frame packets become GstBuffers and
 * finish the oldest pending frame; invisible altref frames are stashed
 * on the coder hook instead of being pushed.
 *
 * NOTE(review): return values mix gboolean (TRUE/FALSE) with
 * GST_FLOW_ERROR — a GstFlowReturn — on several error paths; callers
 * treating this strictly as gboolean will see GST_FLOW_ERROR as TRUE.
 * Confirm the intended return contract. */
static gboolean
gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstVP8Enc *encoder;
  const GstVideoState *state;
  vpx_codec_err_t status;
  int flags = 0;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt;
  vpx_image_t *image;
  GstVP8EncCoderHook *hook;

  GST_DEBUG_OBJECT (base_video_encoder, "handle_frame");

  encoder = GST_VP8_ENC (base_video_encoder);
  state = gst_base_video_encoder_get_state (base_video_encoder);
  encoder->n_frames++;

  GST_DEBUG_OBJECT (base_video_encoder, "size %d %d", state->width,
      state->height);

  /* Deferred init: configuration needs the negotiated caps/state. */
  if (!encoder->inited) {
    vpx_codec_enc_cfg_t cfg;

    status = vpx_codec_enc_config_default (&vpx_codec_vp8_cx_algo, &cfg, 0);
    if (status != VPX_CODEC_OK) {
      GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
          ("Failed to get default encoder configuration"), ("%s",
              gst_vpx_error_name (status)));
      return FALSE;
    }

    cfg.g_w = state->width;
    cfg.g_h = state->height;
    /* libvpx timebase is the inverse of the framerate. */
    cfg.g_timebase.num = state->fps_d;
    cfg.g_timebase.den = state->fps_n;

    cfg.g_error_resilient = encoder->error_resilient;
    cfg.g_lag_in_frames = encoder->max_latency;
    cfg.g_threads = encoder->threads;
    cfg.rc_end_usage = encoder->mode;
    if (encoder->bitrate) {
      /* Property is in bps; libvpx expects kbps. */
      cfg.rc_target_bitrate = encoder->bitrate / 1000;
    } else {
      /* Quality mode: pin both quantizer bounds to the same value. */
      cfg.rc_min_quantizer = 63 - encoder->quality * 5.0;
      cfg.rc_max_quantizer = 63 - encoder->quality * 5.0;
      cfg.rc_target_bitrate = encoder->bitrate;
    }
    cfg.kf_mode = VPX_KF_AUTO;
    cfg.kf_min_dist = 0;
    cfg.kf_max_dist = encoder->max_keyframe_distance;

    cfg.g_pass = encoder->multipass_mode;
    if (encoder->multipass_mode == VPX_RC_FIRST_PASS) {
      encoder->first_pass_cache_content = g_byte_array_sized_new (4096);
    } else if (encoder->multipass_mode == VPX_RC_LAST_PASS) {
      GError *err = NULL;

      if (!encoder->multipass_cache_file) {
        GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
            ("No multipass cache file provided"), (NULL));
        return GST_FLOW_ERROR;
      }

      /* NOTE(review): casting &buf to gchar** and passing &sz assumes the
       * fixed_buf fields match gchar*/gsize layout — confirm. */
      if (!g_file_get_contents (encoder->multipass_cache_file,
              (gchar **) & encoder->last_pass_cache_content.buf,
              &encoder->last_pass_cache_content.sz, &err)) {
        GST_ELEMENT_ERROR (encoder, RESOURCE, OPEN_READ,
            ("Failed to read multipass cache file provided"), ("%s",
                err->message));
        g_error_free (err);
        return GST_FLOW_ERROR;
      }
      cfg.rc_twopass_stats_in = encoder->last_pass_cache_content;
    }

    status =
        vpx_codec_enc_init (&encoder->encoder, &vpx_codec_vp8_cx_algo, &cfg, 0);
    if (status != VPX_CODEC_OK) {
      GST_ELEMENT_ERROR (encoder, LIBRARY, INIT,
          ("Failed to initialize encoder"), ("%s",
              gst_vpx_error_name (status)));
      return GST_FLOW_ERROR;
    }

    /* Control failures are non-fatal: warn and continue. */
    status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED, 0);
    if (status != VPX_CODEC_OK) {
      GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s",
          gst_vpx_error_name (status));
    }

    status = vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
        (encoder->auto_alt_ref_frames ? 1 : 0));
    if (status != VPX_CODEC_OK) {
      GST_WARNING_OBJECT (encoder,
          "Failed to set VP8E_ENABLEAUTOALTREF to %d: %s",
          (encoder->auto_alt_ref_frames ? 1 : 0),
          gst_vpx_error_name (status));
    }

    /* Advertise latency from g_lag_in_frames (max_latency frames). */
    gst_base_video_encoder_set_latency (base_video_encoder, 0,
        gst_util_uint64_scale (encoder->max_latency,
            state->fps_d * GST_SECOND, state->fps_n));
    encoder->inited = TRUE;
  }

  image = gst_vp8_enc_buffer_to_image (encoder, frame->sink_buffer);

  /* Per-frame hook keeps the wrapped image (and any invisible-frame
   * buffers) alive until the frame is finished. */
  hook = g_slice_new0 (GstVP8EncCoderHook);
  hook->image = image;
  frame->coder_hook = hook;

  if (encoder->force_keyframe) {
    flags |= VPX_EFLAG_FORCE_KF;
  }

  status = vpx_codec_encode (&encoder->encoder, image,
      encoder->n_frames, 1, flags, speed_table[encoder->speed]);
  if (status != 0) {
    GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE,
        ("Failed to encode frame"), ("%s", gst_vpx_error_name (status)));
    g_slice_free (GstVP8EncCoderHook, hook);
    frame->coder_hook = NULL;
    g_slice_free (vpx_image_t, image);
    return FALSE;
  }

  /* Drain every pending packet from the encoder. */
  pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  while (pkt != NULL) {
    GstBuffer *buffer;
    gboolean invisible;

    GST_DEBUG_OBJECT (encoder, "packet %u type %d",
        (guint) pkt->data.frame.sz, pkt->kind);

    if (pkt->kind == VPX_CODEC_STATS_PKT
        && encoder->multipass_mode == VPX_RC_FIRST_PASS) {
      /* First pass: accumulate stats and finish the frame with an empty
       * preroll buffer so downstream keeps flowing. */
      GST_LOG_OBJECT (encoder, "handling STATS packet");
      g_byte_array_append (encoder->first_pass_cache_content,
          pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);
      frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
      if (frame != NULL) {
        buffer = gst_buffer_new ();
        GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
        frame->src_buffer = buffer;
        gst_base_video_encoder_finish_frame (base_video_encoder, frame);
      }
      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    } else if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) {
      GST_LOG_OBJECT (encoder, "non frame pkt: %d", pkt->kind);
      pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
      continue;
    }

    invisible = (pkt->data.frame.flags & VPX_FRAME_IS_INVISIBLE) != 0;
    frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
    g_assert (frame != NULL);
    frame->is_sync_point = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
    hook = frame->coder_hook;

    /* Copy compressed bytes into a freshly allocated GstBuffer. */
    buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
    memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf,
        pkt->data.frame.sz);

    /* The raw image wrapper is no longer needed once encoded. */
    if (hook->image)
      g_slice_free (vpx_image_t, hook->image);
    hook->image = NULL;

    if (invisible) {
      /* Invisible (altref) frames are queued on the hook, not pushed. */
      hook->invisible = g_list_append (hook->invisible, buffer);
    } else {
      frame->src_buffer = buffer;
      gst_base_video_encoder_finish_frame (base_video_encoder, frame);
    }

    pkt = vpx_codec_get_cx_data (&encoder->encoder, &iter);
  }

  return TRUE;
}