Example #1 (score: 1)
HRESULT GetSampleFromMFStreamer(/* out */ const vpx_codec_cx_pkt_t *& vpkt)
{
	//printf("Get Sample...\n");

	IMFSample *videoSample = NULL;

	// Initial read results in a null pSample??
	CHECK_HR(videoReader->ReadSample(
		MF_SOURCE_READER_ANY_STREAM,    // Stream index.
		0,                              // Flags.
		&streamIndex,                   // Receives the actual stream index. 
		&flags,                         // Receives status flags.
		&llVideoTimeStamp,                   // Receives the time stamp.
		&videoSample                        // Receives the sample or NULL.
		), L"Error reading video sample.");

	if (!videoSample)
	{
		printf("Failed to get video sample from MF.\n");
		return S_FALSE; // no sample delivered this time (e.g. a gap or stream tick); report "no data" instead of falling off the end of the function
	}
	else
	{
		DWORD nCurrBufferCount = 0;
		CHECK_HR(videoSample->GetBufferCount(&nCurrBufferCount), L"Failed to get the buffer count from the video sample.\n");

		IMFMediaBuffer * pMediaBuffer;
		CHECK_HR(videoSample->ConvertToContiguousBuffer(&pMediaBuffer), L"Failed to extract the video sample into a raw buffer.\n");

		DWORD nCurrLen = 0;
		CHECK_HR(pMediaBuffer->GetCurrentLength(&nCurrLen), L"Failed to get the length of the raw buffer holding the video sample.\n");

		byte *imgBuff;
		DWORD buffCurrLen = 0;
		DWORD buffMaxLen = 0;
		pMediaBuffer->Lock(&imgBuff, &buffMaxLen, &buffCurrLen);
		
		/*BYTE *i420 = new BYTE[4608000];
		YUY2ToI420(WIDTH, HEIGHT, STRIDE, imgBuff, i420);
		vpx_image_t* img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, i420);*/
		
		vpx_image_t* const img = vpx_img_wrap(&_rawImage, VIDEO_INPUT_FORMAT, _vpxConfig.g_w, _vpxConfig.g_h, 1, imgBuff);
		
		const vpx_codec_cx_pkt_t * pkt;
		vpx_enc_frame_flags_t flags = 0;
		
		if (vpx_codec_encode(&_vpxCodec, &_rawImage, _sampleCount, 1, flags, VPX_DL_REALTIME)) {
			printf("VPX codec failed to encode the frame.\n");
			return -1;
		}
		else {
			vpx_codec_iter_t iter = NULL;

			while ((pkt = vpx_codec_get_cx_data(&_vpxCodec, &iter))) {
				switch (pkt->kind) {
				case VPX_CODEC_CX_FRAME_PKT:                                
					vpkt = pkt; // const_cast<vpx_codec_cx_pkt_t **>(&pkt);
					break;
				default:
					break;
				}

				printf("%s %i\n", pkt->kind == VPX_CODEC_CX_FRAME_PKT && (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? "K" : ".", pkt->data.frame.sz);
			}
		}

		_sampleCount++;

		vpx_img_free(img);

		pMediaBuffer->Unlock();
		pMediaBuffer->Release();

		//delete i420;

		videoSample->Release();

		return S_OK;
	}
}
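Example #1 wraps the locked Media Foundation buffer with vpx_img_wrap() and then calls vpx_img_free() on the wrapper. In libvpx, vpx_img_free() only releases storage the library itself allocated: pixel data is freed only when it came from vpx_img_alloc(), and the descriptor is freed only when the caller passed NULL for it, so the call above leaves both the member descriptor and the Media-Foundation-owned buffer untouched. A minimal sketch of the two lifetimes (helper name and buffer size are illustrative):

#include <stdlib.h>
#include "vpx/vpx_image.h"

/* Sketch of the two allocation paths and of what vpx_img_free() actually
   releases. The helper name and buffer size are illustrative. */
static void image_lifetime_sketch(unsigned int w, unsigned int h) {
  /* Path 1: wrap memory we own. vpx_img_free() frees neither the pixels (they
     did not come from vpx_img_alloc) nor the descriptor (it lives on the
     stack), so the buffer must be released by its owner. */
  unsigned char *raw_buf = malloc((size_t)w * h * 3 / 2);  /* I420 payload */
  if (raw_buf) {
    vpx_image_t wrapped;
    vpx_img_wrap(&wrapped, VPX_IMG_FMT_I420, w, h, 1, raw_buf);
    vpx_img_free(&wrapped);  /* effectively a no-op for this descriptor */
    free(raw_buf);           /* our pixels, our free() */
  }

  /* Path 2: let libvpx allocate both descriptor and pixels; vpx_img_free()
     then releases both. */
  vpx_image_t *owned = vpx_img_alloc(NULL, VPX_IMG_FMT_I420, w, h, 1);
  if (owned)
    vpx_img_free(owned);
}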
Example #2 (score: 0)
void Core::sendCallVideo(int32_t callId)
{
    if (!calls[callId].active || !calls[callId].videoEnabled)
        return;

    vpx_image frame = camera->getLastFrame().createVpxImage();
    if (frame.w && frame.h)
    {
        int result;
        if ((result = toxav_prepare_video_frame(toxav, callId, videobuf, videobufsize, &frame)) < 0)
        {
            qDebug() << QString("Core: toxav_prepare_video_frame: error %1").arg(result);
            vpx_img_free(&frame);
            calls[callId].sendVideoTimer->start();
            return;
        }

        if ((result = toxav_send_video(toxav, callId, (uint8_t*)videobuf, result)) < 0)
            qDebug() << QString("Core: toxav_send_video error: %1").arg(result);

        vpx_img_free(&frame);
    }
    else
    {
        qDebug("Core::sendCallVideo: Invalid frame (bad camera ?)");
    }

    calls[callId].sendVideoTimer->start();
}
Example #3 (score: 0)
static void testing_decode(vpx_codec_ctx_t *encoder, vpx_codec_ctx_t *decoder,
                           unsigned int frame_out, int *mismatch_seen) {
  vpx_image_t enc_img, dec_img;
  struct vp9_ref_frame ref_enc, ref_dec;

  if (*mismatch_seen) return;

  ref_enc.idx = 0;
  ref_dec.idx = 0;
  if (vpx_codec_control(encoder, VP9_GET_REFERENCE, &ref_enc))
    die_codec(encoder, "Failed to get encoder reference frame");
  enc_img = ref_enc.img;
  if (vpx_codec_control(decoder, VP9_GET_REFERENCE, &ref_dec))
    die_codec(decoder, "Failed to get decoder reference frame");
  dec_img = ref_dec.img;

  if (!compare_img(&enc_img, &dec_img)) {
    int y[4], u[4], v[4];

    *mismatch_seen = 1;

    find_mismatch(&enc_img, &dec_img, y, u, v);
    printf(
        "Encode/decode mismatch on frame %d at"
        " Y[%d, %d] {%d/%d},"
        " U[%d, %d] {%d/%d},"
        " V[%d, %d] {%d/%d}",
        frame_out, y[0], y[1], y[2], y[3], u[0], u[1], u[2], u[3], v[0], v[1],
        v[2], v[3]);
  }

  vpx_img_free(&enc_img);
  vpx_img_free(&dec_img);
}
Example #4 (score: 0)
static void close_video_device(void *handle) {
    if (handle >= (void *)2) {
        native_video_close(*(void **)handle);
        vpx_img_free(&input);
    }
    video_device_status = false;
}
Example #5 (score: 0)
void krad_vpx_encoder_finish (krad_vpx_encoder_t *kradvpx) {

	if (kradvpx->image != NULL) {
		vpx_img_free (kradvpx->image);
		kradvpx->image = NULL;
	}

}
Example #6 (score: 0)
LumaEncoder::~LumaEncoder()
{
    if (m_initialized)
    {
        vpx_img_free(&m_rawFrame);
        if (vpx_codec_destroy(&m_codec))
            fprintf(stderr, "Failed to destroy codec.\n");
    }
}
Example #7 (score: 0)
int cleanup(encoding_context *context) {
	fclose(context->ebml.stream);
	vpx_img_free(&context->vpx_image);
	if (0!=vpx_codec_destroy(&context->codec)) return 1099;
	if (context->frame_durations) {
		free(context->frame_durations);
	}
	return 0;
}
Example #8 (score: 0)
void vc_iterate(VCSession *vc)
{
    if (!vc) {
        return;
    }

    pthread_mutex_lock(vc->queue_mutex);

    struct RTPMessage *p;

    if (!rb_read(vc->vbuf_raw, (void **)&p)) {
        LOGGER_TRACE(vc->log, "no Video frame data available");
        pthread_mutex_unlock(vc->queue_mutex);
        return;
    }

    pthread_mutex_unlock(vc->queue_mutex);
    const struct RTPHeader *const header = &p->header;

    uint32_t full_data_len;

    if (header->flags & RTP_LARGE_FRAME) {
        full_data_len = header->data_length_full;
        LOGGER_DEBUG(vc->log, "vc_iterate:001:full_data_len=%d", (int)full_data_len);
    } else {
        full_data_len = p->len;
        LOGGER_DEBUG(vc->log, "vc_iterate:002");
    }

    LOGGER_DEBUG(vc->log, "vc_iterate: rb_read p->len=%d p->header.xe=%d", (int)full_data_len, p->header.xe);
    LOGGER_DEBUG(vc->log, "vc_iterate: rb_read rb size=%d", (int)rb_size(vc->vbuf_raw));
    const vpx_codec_err_t rc = vpx_codec_decode(vc->decoder, p->data, full_data_len, nullptr, MAX_DECODE_TIME_US);
    free(p);

    if (rc != VPX_CODEC_OK) {
        LOGGER_ERROR(vc->log, "Error decoding video: %d %s", (int)rc, vpx_codec_err_to_string(rc));
        return;
    }

    /* Play decoded images */
    vpx_codec_iter_t iter = nullptr;

    for (vpx_image_t *dest = vpx_codec_get_frame(vc->decoder, &iter);
            dest != nullptr;
            dest = vpx_codec_get_frame(vc->decoder, &iter)) {
        if (vc->vcb) {
            vc->vcb(vc->av, vc->friend_number, dest->d_w, dest->d_h,
                    (const uint8_t *)dest->planes[0], (const uint8_t *)dest->planes[1], (const uint8_t *)dest->planes[2],
                    dest->stride[0], dest->stride[1], dest->stride[2], vc->vcb_user_data);
        }

        vpx_img_free(dest); // is this needed? none of the VPx examples show that
    }
}
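The closing comment in Example #8 asks whether vpx_img_free() is needed on frames obtained from vpx_codec_get_frame(). In the libvpx decode examples (including the vpxdec loop in Example #20 below) those frames are never freed: they are owned by the decoder and remain valid only until the next vpx_codec_decode() call. Since vpx_img_free() only releases memory the library allocated for that particular descriptor, the extra call here should be harmless, but it is not part of the usual pattern. A minimal sketch of the conventional loop, with illustrative helper names:

#include <stddef.h>
#include "vpx/vpx_decoder.h"

/* Sketch of the conventional decode/consume loop (modeled on libvpx's
   simple_decoder example). Frames returned by vpx_codec_get_frame() are owned
   by the decoder and stay valid until the next vpx_codec_decode() call, so
   they are read, not freed. decode_one() and the consume callback are
   illustrative names. */
static int decode_one(vpx_codec_ctx_t *decoder,
                      const unsigned char *frame_buf, size_t frame_len,
                      void (*consume)(const vpx_image_t *img)) {
  vpx_codec_iter_t iter = NULL;
  const vpx_image_t *img;

  if (vpx_codec_decode(decoder, frame_buf, (unsigned int)frame_len, NULL, 0) != VPX_CODEC_OK)
    return -1;

  while ((img = vpx_codec_get_frame(decoder, &iter)) != NULL)
    consume(img);  /* hand planes/strides to the caller; no vpx_img_free() needed */

  return 0;
}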
Example #9 (score: 0)
VP8TrackEncoder::~VP8TrackEncoder()
{
  if (mInitialized) {
    vpx_codec_destroy(mVPXContext);
  }

  if (mVPXImageWrapper) {
    vpx_img_free(mVPXImageWrapper);
  }
  MOZ_COUNT_DTOR(VP8TrackEncoder);
}
Example #10 (score: 0)
 VpxImage& operator =(const VpxImage& src) {
   if (this == &src) {
     return *this;  // guard against self-assignment: freeing image_ below would also free src's buffer
   }
   if (image_) {
     vpx_img_free(image_);
     image_ = 0;
   }
   assert(src.format() == VPX_IMG_FMT_RGB24);
   image_ = vpx_img_alloc(NULL, src.format(), src.width(), src.height(), 1);
   if (image_) {
     memcpy(image_->img_data, src.buffer(), width()*height()*3);
   }
   return *this;
 }
Example #11 (score: 0)
/*
 * Close codec.
 */
static pj_status_t pj_vpx_codec_close(pjmedia_vid_codec *codec) {
    vpx_private *vpx;

    PJ_ASSERT_RETURN(codec, PJ_EINVAL);
    vpx = (vpx_private*) codec->codec_data;

    vpx_codec_destroy(&vpx->decoder);
    vpx_codec_destroy(&vpx->encoder);
    vpx_img_free(&vpx->rawimg);

    return PJ_SUCCESS;
}
Example #12 (score: 0)
void Core::playCallVideo(ToxAv*, int32_t callId, vpx_image_t* img, void *user_data)
{
    Q_UNUSED(user_data);

    if (!calls[callId].active || !calls[callId].videoEnabled)
        return;

    if (videoBusyness >= 1)
        qWarning() << "Core: playCallVideo: Busy, dropping current frame";
    else
        emit Widget::getInstance()->getCore()->videoFrameReceived(img);
    vpx_img_free(img);
}
Example #13 (score: 0)
	VPXEncoder::~VPXEncoder()
	{
		if (_rawImage != NULL) {
			vpx_img_free(_rawImage);
		}

		if (_vpxCodec != NULL) {
			vpx_codec_destroy(_vpxCodec);
		}

		if (_vpxDecoder != NULL) {
			vpx_codec_destroy(_vpxDecoder);
		}
	}
Example #14 (score: 0)
void krad_vpx_encoder_destroy (krad_vpx_encoder_t *kradvpx) {

	if (kradvpx->image != NULL) {
		vpx_img_free (kradvpx->image);
		kradvpx->image = NULL;
	}
	vpx_codec_destroy (&kradvpx->encoder);

#ifdef BENCHMARK
krad_timer_destroy(kradvpx->krad_timer);
#endif

	free (kradvpx);

}
Example #15 (score: 0)
WebmExporter::~WebmExporter()
{
  if (_img) {
    vpx_img_free(_img);
  }
  // Flush encoder.
  while (add_frame(nullptr))
    ;

  if (vpx_codec_destroy(&_codec)) {
    codec_error("failed to destroy codec");
    return;
  }
  if (!_segment.Finalize()) {
    std::cerr << "couldn't finalise muxer segment" << std::endl;
    return;
  }
  _writer.Close();
}
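Example #15 flushes the encoder from the destructor by calling add_frame(nullptr) until it reports no more output, which is the same idiom the encoder examples further down (#24, #27, #30) use by passing a NULL image to vpx_codec_encode(). A minimal sketch of what one such flush step typically looks like, with illustrative names:

#include <stdbool.h>
#include "vpx/vpx_encoder.h"

/* Sketch of the flush idiom behind add_frame(nullptr): a NULL image passed to
   vpx_codec_encode() drains the encoder, and flushing is finished once no more
   compressed packets come out. flush_step() and write_packet() are
   illustrative names. */
static bool flush_step(vpx_codec_ctx_t *codec, vpx_codec_pts_t pts,
                       void (*write_packet)(const vpx_codec_cx_pkt_t *pkt)) {
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt;
  bool got_packet = false;

  if (vpx_codec_encode(codec, NULL, pts, 1, 0, VPX_DL_GOOD_QUALITY) != VPX_CODEC_OK)
    return false;

  while ((pkt = vpx_codec_get_cx_data(codec, &iter)) != NULL) {
    got_packet = true;
    if (pkt->kind == VPX_CODEC_CX_FRAME_PKT)
      write_packet(pkt);
  }
  return got_packet;  /* keep calling until this returns false */
}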
Example #16 (score: 0)
// Closes the instance of the component.
// Release all storage associated with this instance.
// Note that if a component's Open function fails with an error, its Close function will still be called.
ComponentResult
VP8_Encoder_Close(
                  VP8EncoderGlobals glob,
                  ComponentInstance self)
{
  dbg_printf("[vp8e - %08lx]  Close Called\n", (UInt32)glob);

  if (glob)
  {
    if (glob->stats.buf != NULL)
    {
      free(glob->stats.buf);
      glob->stats.buf =NULL;
      glob->stats.sz=0;
    }

    if (glob->codec) //see if i've initialized the vpx_codec
    {
      if (vpx_codec_destroy(glob->codec))
        dbg_printf("[vp8e - %08lx] Failed to destroy codec\n", (UInt32)glob);

      free(glob->codec);
    }

    ICMCompressionSessionOptionsRelease(glob->sessionOptions);
    glob->sessionOptions = NULL;

    if (glob->raw)
    {
      vpx_img_free(glob->raw);
      free(glob->raw);
    }

    if (glob->sourceQueue.queue != NULL)
      free(glob->sourceQueue.queue);

    free(glob);
  }

  return noErr;
}
Example #17 (score: 0)
static switch_status_t switch_vpx_destroy(switch_codec_t *codec)
{
	vpx_context_t *context = (vpx_context_t *)codec->private_info;

	if (context) {
		if ((codec->flags & SWITCH_CODEC_FLAG_ENCODE)) {
			vpx_codec_destroy(&context->encoder);
		}

		if ((codec->flags & SWITCH_CODEC_FLAG_DECODE)) {
			vpx_codec_destroy(&context->decoder);
		}

		if (context->pic) {
			vpx_img_free(context->pic);
			context->pic = NULL;
		}
		if (context->vpx_packet_buffer) {
			switch_buffer_destroy(&context->vpx_packet_buffer);
			context->vpx_packet_buffer = NULL;
		}
	}
	return SWITCH_STATUS_SUCCESS;
}
Example #18 (score: 0)
void *encode_video_thread(void *arg)
{
    INFO("Started encode video thread!");

    av_session_t *_phone = arg;

    _phone->running_encvid = 1;
    //CodecState *cs = get_cs_temp(_phone->av);
    AVPacket pkt1, *packet = &pkt1;
    //int p = 0;
    //int got_packet;
    int video_frame_finished;
    AVFrame *s_video_frame;
    AVFrame *webcam_frame;
    s_video_frame = avcodec_alloc_frame();
    webcam_frame = avcodec_alloc_frame();
    //AVPacket enc_video_packet;

    uint8_t *buffer;
    int numBytes;
    /* Determine required buffer size and allocate buffer */
    numBytes = avpicture_get_size(PIX_FMT_YUV420P, _phone->webcam_decoder_ctx->width, _phone->webcam_decoder_ctx->height);
    buffer = (uint8_t *)av_calloc(numBytes * sizeof(uint8_t), 1);
    avpicture_fill((AVPicture *)s_video_frame, buffer, PIX_FMT_YUV420P, _phone->webcam_decoder_ctx->width,
                   _phone->webcam_decoder_ctx->height);
    _phone->sws_ctx = sws_getContext(_phone->webcam_decoder_ctx->width, _phone->webcam_decoder_ctx->height,
                                     _phone->webcam_decoder_ctx->pix_fmt, _phone->webcam_decoder_ctx->width, _phone->webcam_decoder_ctx->height,
                                     PIX_FMT_YUV420P,
                                     SWS_BILINEAR, NULL, NULL, NULL);


    vpx_image_t *image =
        vpx_img_alloc(NULL, VPX_IMG_FMT_I420, _phone->webcam_decoder_ctx->width, _phone->webcam_decoder_ctx->height, 1);

    //uint32_t frame_counter = 0;
    while (_phone->running_encvid) {

        if (av_read_frame(_phone->video_format_ctx, packet) < 0) {
            printf("error reading frame\n");

            if (_phone->video_format_ctx->pb->error != 0)
                break;

            continue;
        }

        if (packet->stream_index == _phone->video_stream) {
            if (avcodec_decode_video2(_phone->webcam_decoder_ctx, webcam_frame, &video_frame_finished, packet) < 0) {
                printf("couldn't decode\n");
                continue;
            }

            av_free_packet(packet);
            sws_scale(_phone->sws_ctx, (uint8_t const * const *)webcam_frame->data, webcam_frame->linesize, 0,
                      _phone->webcam_decoder_ctx->height, s_video_frame->data, s_video_frame->linesize);
            /* create a new I-frame every 60 frames */
            //++p;
            /*
            if (p == 60) {

                s_video_frame->pict_type = AV_PICTURE_TYPE_BI ;
            } else if (p == 61) {
                s_video_frame->pict_type = AV_PICTURE_TYPE_I ;
                p = 0;
            } else {
                s_video_frame->pict_type = AV_PICTURE_TYPE_P ;
            }*/

            if (video_frame_finished) {
                memcpy(image->planes[VPX_PLANE_Y], s_video_frame->data[0],
                       s_video_frame->linesize[0] * _phone->webcam_decoder_ctx->height);
                memcpy(image->planes[VPX_PLANE_U], s_video_frame->data[1],
                       s_video_frame->linesize[1] * _phone->webcam_decoder_ctx->height / 2);
                memcpy(image->planes[VPX_PLANE_V], s_video_frame->data[2],
                       s_video_frame->linesize[2] * _phone->webcam_decoder_ctx->height / 2);
                toxav_send_video (_phone->av, image);
                //if (avcodec_encode_video2(cs->video_encoder_ctx, &enc_video_packet, s_video_frame, &got_packet) < 0) {
                /*if (vpx_codec_encode(&cs->v_encoder, image, frame_counter, 1, 0, 0) != VPX_CODEC_OK) {
                    printf("could not encode video frame\n");
                    continue;
                }
                ++frame_counter;

                vpx_codec_iter_t iter = NULL;
                vpx_codec_cx_pkt_t *pkt;
                while( (pkt = vpx_codec_get_cx_data(&cs->v_encoder, &iter)) ) {
                    if (pkt->kind == VPX_CODEC_CX_FRAME_PKT)
                        toxav_send_rtp_payload(_phone->av, TypeVideo, pkt->data.frame.buf, pkt->data.frame.sz);
                }*/
                //if (!got_packet) {
                //    continue;
                //}

                //if (!enc_video_packet.data) fprintf(stderr, "video packet data is NULL\n");

                //toxav_send_rtp_payload(_phone->av, TypeVideo, enc_video_packet.data, enc_video_packet.size);

                //av_free_packet(&enc_video_packet);
            }
        } else {
            av_free_packet(packet);
        }
    }

    vpx_img_free(image);

    /* clean up codecs */
    //pthread_mutex_lock(&cs->ctrl_mutex);
    av_free(buffer);
    av_free(webcam_frame);
    av_free(s_video_frame);
    sws_freeContext(_phone->sws_ctx);
    //avcodec_close(webcam_decoder_ctx);
    //avcodec_close(cs->video_encoder_ctx);
    //pthread_mutex_unlock(&cs->ctrl_mutex);

    _phone->running_encvid = -1;

    pthread_exit ( NULL );
}
Example #19
int main(int argc, const char **argv) {
    AppInput app_input = {0};
    FILE *outfile;
    vpx_codec_ctx_t codec;
    vpx_codec_enc_cfg_t enc_cfg;
    SvcContext svc_ctx;
    uint32_t i;
    uint32_t frame_cnt = 0;
    vpx_image_t raw;
    vpx_codec_err_t res;
    int pts = 0;            /* PTS starts at 0 */
    int frame_duration = 1; /* 1 timebase tick per frame */
    vpx_codec_cx_pkt_t packet = {0};
    packet.kind = VPX_CODEC_CX_FRAME_PKT;

    memset(&svc_ctx, 0, sizeof(svc_ctx));
    svc_ctx.log_print = 1;
    exec_name = argv[0];
    parse_command_line(argc, argv, &app_input, &svc_ctx, &enc_cfg);

    // Allocate image buffer
    if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, enc_cfg.g_w, enc_cfg.g_h, 32))
        die("Failed to allocate image %dx%d\n", enc_cfg.g_w, enc_cfg.g_h);

    if (!(app_input.input_ctx.file = fopen(app_input.input_ctx.filename, "rb")))
        die("Failed to open %s for reading\n", app_input.input_ctx.filename);

    if (!(outfile = fopen(app_input.output_filename, "wb")))
        die("Failed to open %s for writing\n", app_input.output_filename);

    // Initialize codec
    if (vpx_svc_init(&svc_ctx, &codec, vpx_codec_vp9_cx(), &enc_cfg) !=
            VPX_CODEC_OK)
        die("Failed to initialize encoder\n");

    ivf_write_file_header(outfile, &enc_cfg, VP9_FOURCC, 0);

    // skip initial frames
    for (i = 0; i < app_input.frames_to_skip; ++i) {
        read_yuv_frame(&app_input.input_ctx, &raw);
    }

    // Encode frames
    while (frame_cnt < app_input.frames_to_code) {
        if (read_yuv_frame(&app_input.input_ctx, &raw)) break;

        res = vpx_svc_encode(&svc_ctx, &codec, &raw, pts, frame_duration,
                             VPX_DL_REALTIME);
        printf("%s", vpx_svc_get_message(&svc_ctx));
        if (res != VPX_CODEC_OK) {
            die_codec(&codec, "Failed to encode frame");
        }
        if (vpx_svc_get_frame_size(&svc_ctx) > 0) {
            packet.data.frame.pts = pts;
            packet.data.frame.sz = vpx_svc_get_frame_size(&svc_ctx);
            ivf_write_frame_header(outfile, &packet);
            (void)fwrite(vpx_svc_get_buffer(&svc_ctx), 1,
                         vpx_svc_get_frame_size(&svc_ctx), outfile);
        }
        ++frame_cnt;
        pts += frame_duration;
    }

    printf("Processed %d frames\n", frame_cnt);

    fclose(app_input.input_ctx.file);
    if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec");

    // rewrite the output file headers with the actual frame count, and
    // resolution of the highest layer
    if (!fseek(outfile, 0, SEEK_SET)) {
        // get resolution of highest layer
        if (VPX_CODEC_OK != vpx_svc_get_layer_resolution(&svc_ctx,
                svc_ctx.spatial_layers - 1,
                &enc_cfg.g_w,
                &enc_cfg.g_h)) {
            die("Failed to get output resolution");
        }
        ivf_write_file_header(outfile, &enc_cfg, VP9_FOURCC, frame_cnt);
    }
    fclose(outfile);
    vpx_img_free(&raw);

    // display average size, psnr
    printf("%s", vpx_svc_dump_statistics(&svc_ctx));

    vpx_svc_release(&svc_ctx);

    return EXIT_SUCCESS;
}
Example #20 (score: 0)
int main_loop(int argc, const char **argv_) {
  vpx_codec_ctx_t       decoder;
  char                  *fn = NULL;
  int                    i;
  uint8_t               *buf = NULL;
  size_t                 bytes_in_buffer = 0, buffer_size = 0;
  FILE                  *infile;
  int                    frame_in = 0, frame_out = 0, flipuv = 0, noblit = 0;
  int                    do_md5 = 0, progress = 0;
  int                    stop_after = 0, postproc = 0, summary = 0, quiet = 1;
  int                    arg_skip = 0;
  int                    ec_enabled = 0;
  const VpxInterface *interface = NULL;
  const VpxInterface *fourcc_interface = NULL;
  uint64_t dx_time = 0;
  struct arg               arg;
  char                   **argv, **argi, **argj;

  int                     single_file;
  int                     use_y4m = 1;
  vpx_codec_dec_cfg_t     cfg = {0};
#if CONFIG_VP8_DECODER
  vp8_postproc_cfg_t      vp8_pp_cfg = {0};
  int                     vp8_dbg_color_ref_frame = 0;
  int                     vp8_dbg_color_mb_modes = 0;
  int                     vp8_dbg_color_b_modes = 0;
  int                     vp8_dbg_display_mv = 0;
#endif
  int                     frames_corrupted = 0;
  int                     dec_flags = 0;
  int                     do_scale = 0;
  vpx_image_t             *scaled_img = NULL;
  int                     frame_avail, got_data;
  int                     num_external_frame_buffers = 0;
  struct ExternalFrameBufferList ext_fb_list = {0};

  const char *outfile_pattern = NULL;
  char outfile_name[PATH_MAX] = {0};
  FILE *outfile = NULL;

  MD5Context md5_ctx;
  unsigned char md5_digest[16];

  struct VpxDecInputContext input = {0};
  struct VpxInputContext vpx_input_ctx = {0};
  struct WebmInputContext webm_ctx = {0};
  input.vpx_input_ctx = &vpx_input_ctx;
  input.webm_ctx = &webm_ctx;

  /* Parse command line */
  exec_name = argv_[0];
  argv = argv_dup(argc - 1, argv_ + 1);

  for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) {
    memset(&arg, 0, sizeof(arg));
    arg.argv_step = 1;

    if (arg_match(&arg, &codecarg, argi)) {
      interface = get_vpx_decoder_by_name(arg.val);
      if (!interface)
        die("Error: Unrecognized argument (%s) to --codec\n", arg.val);
    } else if (arg_match(&arg, &looparg, argi)) {
      // no-op
    } else if (arg_match(&arg, &outputfile, argi))
      outfile_pattern = arg.val;
    else if (arg_match(&arg, &use_yv12, argi)) {
      use_y4m = 0;
      flipuv = 1;
    } else if (arg_match(&arg, &use_i420, argi)) {
      use_y4m = 0;
      flipuv = 0;
    } else if (arg_match(&arg, &flipuvarg, argi))
      flipuv = 1;
    else if (arg_match(&arg, &noblitarg, argi))
      noblit = 1;
    else if (arg_match(&arg, &progressarg, argi))
      progress = 1;
    else if (arg_match(&arg, &limitarg, argi))
      stop_after = arg_parse_uint(&arg);
    else if (arg_match(&arg, &skiparg, argi))
      arg_skip = arg_parse_uint(&arg);
    else if (arg_match(&arg, &postprocarg, argi))
      postproc = 1;
    else if (arg_match(&arg, &md5arg, argi))
      do_md5 = 1;
    else if (arg_match(&arg, &summaryarg, argi))
      summary = 1;
    else if (arg_match(&arg, &threadsarg, argi))
      cfg.threads = arg_parse_uint(&arg);
    else if (arg_match(&arg, &verbosearg, argi))
      quiet = 0;
    else if (arg_match(&arg, &scalearg, argi))
      do_scale = 1;
    else if (arg_match(&arg, &fb_arg, argi))
      num_external_frame_buffers = arg_parse_uint(&arg);

#if CONFIG_VP8_DECODER
    else if (arg_match(&arg, &addnoise_level, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_ADDNOISE;
      vp8_pp_cfg.noise_level = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &demacroblock_level, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_DEMACROBLOCK;
      vp8_pp_cfg.deblocking_level = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &deblock, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_DEBLOCK;
    } else if (arg_match(&arg, &mfqe, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_MFQE;
    } else if (arg_match(&arg, &pp_debug_info, argi)) {
      unsigned int level = arg_parse_uint(&arg);

      postproc = 1;
      vp8_pp_cfg.post_proc_flag &= ~0x7;

      if (level)
        vp8_pp_cfg.post_proc_flag |= level;
    } else if (arg_match(&arg, &pp_disp_ref_frame, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_ref_frame = flags;
      }
    } else if (arg_match(&arg, &pp_disp_mb_modes, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_mb_modes = flags;
      }
    } else if (arg_match(&arg, &pp_disp_b_modes, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_b_modes = flags;
      }
    } else if (arg_match(&arg, &pp_disp_mvs, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_display_mv = flags;
      }
    } else if (arg_match(&arg, &error_concealment, argi)) {
      ec_enabled = 1;
    }

#endif
    else
      argj++;
  }

  /* Check for unrecognized options */
  for (argi = argv; *argi; argi++)
    if (argi[0][0] == '-' && strlen(argi[0]) > 1)
      die("Error: Unrecognized option %s\n", *argi);

  /* Handle non-option arguments */
  fn = argv[0];

  if (!fn)
    usage_exit();

  /* Open file */
  infile = strcmp(fn, "-") ? fopen(fn, "rb") : set_binary_mode(stdin);

  if (!infile) {
    fprintf(stderr, "Failed to open file '%s'", strcmp(fn, "-") ? fn : "stdin");
    return EXIT_FAILURE;
  }
#if CONFIG_OS_SUPPORT
  /* Make sure we don't dump to the terminal, unless forced to with -o - */
  if (!outfile_pattern && isatty(fileno(stdout)) && !do_md5 && !noblit) {
    fprintf(stderr,
            "Not dumping raw video to your terminal. Use '-o -' to "
            "override.\n");
    return EXIT_FAILURE;
  }
#endif
  input.vpx_input_ctx->file = infile;
  if (file_is_ivf(input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_IVF;
#if CONFIG_WEBM_IO
  else if (file_is_webm(input.webm_ctx, input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_WEBM;
#endif
  else if (file_is_raw(input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_RAW;
  else {
    fprintf(stderr, "Unrecognized input file type.\n");
#if !CONFIG_WEBM_IO
    fprintf(stderr, "vpxdec was built without WebM container support.\n");
#endif
    return EXIT_FAILURE;
  }

  outfile_pattern = outfile_pattern ? outfile_pattern : "-";
  single_file = is_single_file(outfile_pattern);

  if (!noblit && single_file) {
    generate_filename(outfile_pattern, outfile_name, PATH_MAX,
                      vpx_input_ctx.width, vpx_input_ctx.height, 0);
    if (do_md5)
      MD5Init(&md5_ctx);
    else
      outfile = open_outfile(outfile_name);
  }

  if (use_y4m && !noblit) {
    if (!single_file) {
      fprintf(stderr, "YUV4MPEG2 not supported with output patterns,"
              " try --i420 or --yv12.\n");
      return EXIT_FAILURE;
    }

#if CONFIG_WEBM_IO
    if (vpx_input_ctx.file_type == FILE_TYPE_WEBM) {
      if (webm_guess_framerate(input.webm_ctx, input.vpx_input_ctx)) {
        fprintf(stderr, "Failed to guess framerate -- error parsing "
                "webm file?\n");
        return EXIT_FAILURE;
      }
    }
#endif
  }

  fourcc_interface = get_vpx_decoder_by_fourcc(vpx_input_ctx.fourcc);
  if (interface && fourcc_interface && interface != fourcc_interface)
    warn("Header indicates codec: %s\n", fourcc_interface->name);
  else
    interface = fourcc_interface;

  if (!interface)
    interface = get_vpx_decoder_by_index(0);

  dec_flags = (postproc ? VPX_CODEC_USE_POSTPROC : 0) |
              (ec_enabled ? VPX_CODEC_USE_ERROR_CONCEALMENT : 0);
  if (vpx_codec_dec_init(&decoder, interface->interface(), &cfg, dec_flags)) {
    fprintf(stderr, "Failed to initialize decoder: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (!quiet)
    fprintf(stderr, "%s\n", decoder.name);

#if CONFIG_VP8_DECODER

  if (vp8_pp_cfg.post_proc_flag
      && vpx_codec_control(&decoder, VP8_SET_POSTPROC, &vp8_pp_cfg)) {
    fprintf(stderr, "Failed to configure postproc: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_ref_frame
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_REF_FRAME, vp8_dbg_color_ref_frame)) {
    fprintf(stderr, "Failed to configure reference block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_mb_modes
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_MB_MODES, vp8_dbg_color_mb_modes)) {
    fprintf(stderr, "Failed to configure macro block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_b_modes
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_B_MODES, vp8_dbg_color_b_modes)) {
    fprintf(stderr, "Failed to configure block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_display_mv
      && vpx_codec_control(&decoder, VP8_SET_DBG_DISPLAY_MV, vp8_dbg_display_mv)) {
    fprintf(stderr, "Failed to configure motion vector visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }
#endif


  if (arg_skip)
    fprintf(stderr, "Skipping first %d frames.\n", arg_skip);
  while (arg_skip) {
    if (read_frame(&input, &buf, &bytes_in_buffer, &buffer_size))
      break;
    arg_skip--;
  }

  if (num_external_frame_buffers > 0) {
    ext_fb_list.num_external_frame_buffers = num_external_frame_buffers;
    ext_fb_list.ext_fb = (struct ExternalFrameBuffer *)calloc(
        num_external_frame_buffers, sizeof(*ext_fb_list.ext_fb));
    if (vpx_codec_set_frame_buffer_functions(
            &decoder, get_vp9_frame_buffer, release_vp9_frame_buffer,
            &ext_fb_list)) {
      fprintf(stderr, "Failed to configure external frame buffers: %s\n",
              vpx_codec_error(&decoder));
      return EXIT_FAILURE;
    }
  }

  frame_avail = 1;
  got_data = 0;

  /* Decode file */
  while (frame_avail || got_data) {
    vpx_codec_iter_t  iter = NULL;
    vpx_image_t    *img;
    struct vpx_usec_timer timer;
    int                   corrupted;

    frame_avail = 0;
    if (!stop_after || frame_in < stop_after) {
      if (!read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) {
        frame_avail = 1;
        frame_in++;

        vpx_usec_timer_start(&timer);

        if (vpx_codec_decode(&decoder, buf, (unsigned int)bytes_in_buffer,
                             NULL, 0)) {
          const char *detail = vpx_codec_error_detail(&decoder);
          warn("Failed to decode frame %d: %s",
               frame_in, vpx_codec_error(&decoder));

          if (detail)
            warn("Additional information: %s", detail);
          goto fail;
        }

        vpx_usec_timer_mark(&timer);
        dx_time += vpx_usec_timer_elapsed(&timer);
      }
    }

    vpx_usec_timer_start(&timer);

    got_data = 0;
    if ((img = vpx_codec_get_frame(&decoder, &iter))) {
      ++frame_out;
      got_data = 1;
    }

    vpx_usec_timer_mark(&timer);
    dx_time += (unsigned int)vpx_usec_timer_elapsed(&timer);

    if (vpx_codec_control(&decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
      warn("Failed VP8_GET_FRAME_CORRUPTED: %s", vpx_codec_error(&decoder));
      goto fail;
    }
    frames_corrupted += corrupted;

    if (progress)
      show_progress(frame_in, frame_out, dx_time);

    if (!noblit && img) {
      const int PLANES_YUV[] = {VPX_PLANE_Y, VPX_PLANE_U, VPX_PLANE_V};
      const int PLANES_YVU[] = {VPX_PLANE_Y, VPX_PLANE_V, VPX_PLANE_U};
      const int *planes = flipuv ? PLANES_YVU : PLANES_YUV;

      if (do_scale) {
        if (frame_out == 1) {
          // If the output frames are to be scaled to a fixed display size then
          // use the width and height specified in the container. If either of
          // these is set to 0, use the display size set in the first frame
          // header. If that is unavailable, use the raw decoded size of the
          // first decoded frame.
          int display_width = vpx_input_ctx.width;
          int display_height = vpx_input_ctx.height;
          if (!display_width || !display_height) {
            int display_size[2];
            if (vpx_codec_control(&decoder, VP9D_GET_DISPLAY_SIZE,
                                  display_size)) {
              // As last resort use size of first frame as display size.
              display_width = img->d_w;
              display_height = img->d_h;
            } else {
              display_width = display_size[0];
              display_height = display_size[1];
            }
          }
          scaled_img = vpx_img_alloc(NULL, VPX_IMG_FMT_I420, display_width,
                                     display_height, 16);
        }

        if (img->d_w != scaled_img->d_w || img->d_h != scaled_img->d_h) {
          vpx_image_scale(img, scaled_img, kFilterBox);
          img = scaled_img;
        }
      }

      if (single_file) {
        if (use_y4m) {
          char buf[Y4M_BUFFER_SIZE] = {0};
          size_t len = 0;
          if (frame_out == 1) {
            // Y4M file header
            len = y4m_write_file_header(buf, sizeof(buf),
                                        vpx_input_ctx.width,
                                        vpx_input_ctx.height,
                                        &vpx_input_ctx.framerate, img->fmt);
            if (do_md5) {
              MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
            } else {
              fputs(buf, outfile);
            }
          }

          // Y4M frame header
          len = y4m_write_frame_header(buf, sizeof(buf));
          if (do_md5) {
            MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
          } else {
            fputs(buf, outfile);
          }
        }

        if (do_md5) {
          update_image_md5(img, planes, &md5_ctx);
        } else {
          write_image_file(img, planes, outfile);
        }
      } else {
        generate_filename(outfile_pattern, outfile_name, PATH_MAX,
                          img->d_w, img->d_h, frame_in);
        if (do_md5) {
          MD5Init(&md5_ctx);
          update_image_md5(img, planes, &md5_ctx);
          MD5Final(md5_digest, &md5_ctx);
          print_md5(md5_digest, outfile_name);
        } else {
          outfile = open_outfile(outfile_name);
          write_image_file(img, planes, outfile);
          fclose(outfile);
        }
      }
    }

    if (stop_after && frame_in >= stop_after)
      break;
  }

  if (summary || progress) {
    show_progress(frame_in, frame_out, dx_time);
    fprintf(stderr, "\n");
  }

  if (frames_corrupted)
    fprintf(stderr, "WARNING: %d frames corrupted.\n", frames_corrupted);

fail:

  if (vpx_codec_destroy(&decoder)) {
    fprintf(stderr, "Failed to destroy decoder: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (!noblit && single_file) {
    if (do_md5) {
      MD5Final(md5_digest, &md5_ctx);
      print_md5(md5_digest, outfile_name);
    } else {
      fclose(outfile);
    }
  }

#if CONFIG_WEBM_IO
  if (input.vpx_input_ctx->file_type == FILE_TYPE_WEBM)
    webm_free(input.webm_ctx);
#endif

  if (input.vpx_input_ctx->file_type != FILE_TYPE_WEBM)
    free(buf);

  if (scaled_img) vpx_img_free(scaled_img);

  for (i = 0; i < ext_fb_list.num_external_frame_buffers; ++i) {
    free(ext_fb_list.ext_fb[i].data);
  }
  free(ext_fb_list.ext_fb);

  fclose(infile);
  free(argv);

  return frames_corrupted ? EXIT_FAILURE : EXIT_SUCCESS;
}
Example #21 (score: 0)
int main(int argc, char* argv[])
{
	if (argc != 7)
	{
		fprintf(stderr, "  Usage: WebMEnc <input filename> <flip> <threads> <bit-rates> <frame-per-second> <output filename>\nExample: WebMEnc frame.%%.5d.tiff 1 8 512 30 frame.webm\n");
		return EXIT_FAILURE;
	}

	ilInit();
	iluInit();

	// Initialize VPX codec.
	//
	vpx_codec_ctx_t vpxContext;
	vpx_codec_enc_cfg_t vpxConfig;

    if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &vpxConfig, 0) != VPX_CODEC_OK)
	{
        return EXIT_FAILURE;
    }

	// Try to load the first frame to initialize width and height.
	//
	int flip = (bool)atoi(argv[2]);

	vpx_image_t *rgbImage = NULL, *yv12Image = NULL;
	if (readImage(argv[1], 0, &rgbImage, &yv12Image, flip) == false)
	{
		return EXIT_FAILURE;
	}
	vpxConfig.g_h = yv12Image->h;
	vpxConfig.g_w = yv12Image->w;

	vpxConfig.g_threads = atoi(argv[3]);

	vpxConfig.rc_target_bitrate = atoi(argv[4]);

	vpxConfig.g_timebase.den = atoi(argv[5]);
	vpxConfig.g_timebase.num = 1;

	// Prepare the output .webm file.
	//
	EbmlGlobal ebml;
	memset(&ebml, 0, sizeof(EbmlGlobal));
	ebml.last_pts_ms = -1;
	ebml.stream = fopen(argv[6], "wb");
	if (ebml.stream == NULL)
	{
		return EXIT_FAILURE;
	}
	vpx_rational ebmlFPS = vpxConfig.g_timebase;
	struct vpx_rational arg_framerate = {atoi(argv[5]), 1};
	Ebml_WriteWebMFileHeader(&ebml, &vpxConfig, &arg_framerate);

	unsigned long duration = (float)arg_framerate.den / (float)arg_framerate.num * 1000;

	if (vpx_codec_enc_init(&vpxContext, vpx_codec_vp8_cx(), &vpxConfig, 0) != VPX_CODEC_OK)
	{
        return EXIT_FAILURE;
    }

	//
	fprintf(stdout, "input=%s\nflip=%s\nthreads=%s\nbps=%s\nfps=%s\noutput=%s\n", argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

	
	// Reading image file sequence, encoding to .WebM file.
	//
	int frameNumber = 0;
	while(readImage(argv[1], frameNumber, &rgbImage, &yv12Image, flip))
	{
		vpx_codec_err_t vpxError = vpx_codec_encode(&vpxContext, yv12Image, frameNumber, duration, 0, 0);
		if (vpxError != VPX_CODEC_OK)
		{
			return EXIT_FAILURE;
		}
		
		vpx_codec_iter_t iter = NULL;
		const vpx_codec_cx_pkt_t *packet;
		while( (packet = vpx_codec_get_cx_data(&vpxContext, &iter)) )
		{
			Ebml_WriteWebMBlock(&ebml, &vpxConfig, packet);
		}

		frameNumber ++;
		printf("Processed %d frames.\r", frameNumber);

		vpx_img_free(yv12Image);
		yv12Image = NULL;
	}

	Ebml_WriteWebMFileFooter(&ebml, 0);
	fclose(ebml.stream);

	vpx_codec_destroy(&vpxContext);

	return EXIT_SUCCESS;
}
Example #22 (score: 0)
void VideoProgress::onNextFrame() {
  size_t maxFrames = encoder->fpsValue() * durationSeconds;

  renderSurface->renderNow(frameCount/float(encoder->fpsValue()));
  QImage image = renderSurface->getImage();

  //if (!frameCount || frameCount % 3 == 0) {
    QImage preview = image.scaled(320,200,Qt::KeepAspectRatio,Qt::FastTransformation);
    frame->setPixmap(QPixmap::fromImage(preview));
  //}

  image = image.convertToFormat(QImage::Format_RGB888);

  vpx_image_t* vpxImage = vpx_img_alloc(NULL, VPX_IMG_FMT_RGB24, renderSurface->width(), renderSurface->height(), 1);
  if (!vpxImage) {
    finishCapture("An error occured during capture: Out of memory.");
    return;
  }

  size_t size = renderSurface->width() * renderSurface->height() * 3;
  if (size != size_t(image.byteCount())) {
    finishCapture("An error occured during capture: Image size error.");
    vpx_img_free(vpxImage);
    return;
  }

  memcpy(vpxImage->img_data, image.bits(), size);

  if (!encoder->writeFrame(vpxImage)) {
    finishCapture("An error occured during capture: Frame write error.");
    vpx_img_free(vpxImage);
    return;
  }

  if (vpxImage) {
    vpx_img_free(vpxImage);
  }

  bool complete = false;
  if (++frameCount == maxFrames) {
    finalFrame = true;
    complete = true;
  } else {
    bar->setValue(frameCount/float(maxFrames)*100);
  }

  if (finalFrame) {
    bar->setValue(100);
    encoder->finish();
    delete encoder;
    if (complete) {
      finishCapture("The capture is complete.");
    } else {
      finishCapture("The capture was canceled.");
    }
  }

  if (!finalFrame) {
    QTimer::singleShot(0, this, SLOT(onNextFrame()));
  }
}
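Example #22 allocates and frees a VPX_IMG_FMT_RGB24 image on every frame even though the capture size never changes. A possible variation, sketched below with illustrative names (and the same older RGB24 format and packed-RGB888 memcpy assumption as the original), hoists the vpx_img_alloc()/vpx_img_free() pair out of the loop and reuses one image:

#include <stddef.h>
#include <string.h>
#include "vpx/vpx_image.h"

/* Sketch of reusing a single RGB24 image across frames instead of the
   per-frame alloc/free in the example above. The loop shape, grab_rgb888()
   and write_frame() are illustrative; like the original, it assumes packed
   RGB888 rows (width * 3 bytes per row). */
static int capture_frames(unsigned int width, unsigned int height,
                          size_t frame_count,
                          const unsigned char *(*grab_rgb888)(size_t frame),
                          int (*write_frame)(const vpx_image_t *img)) {
  vpx_image_t *img = vpx_img_alloc(NULL, VPX_IMG_FMT_RGB24, width, height, 1);
  if (!img)
    return -1;  /* out of memory */

  int ok = 1;
  for (size_t i = 0; ok && i < frame_count; ++i) {
    memcpy(img->img_data, grab_rgb888(i), (size_t)width * height * 3);
    ok = write_frame(img);
  }

  vpx_img_free(img);  /* one free, after the whole capture */
  return ok ? 0 : -1;
}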
Example #23 (score: 0)
int main(int argc, char **argv) {
  FILE *infile = NULL;
  int w, h;
  vpx_codec_ctx_t codec;
  vpx_codec_enc_cfg_t cfg;
  vpx_image_t raw;
  vpx_codec_err_t res;
  vpx_fixed_buf_t stats;

  const VpxInterface *encoder = NULL;
  const int fps = 30;       // TODO(dkovalev) add command line argument
  const int bitrate = 200;  // kbit/s TODO(dkovalev) add command line argument
  const char *const codec_arg = argv[1];
  const char *const width_arg = argv[2];
  const char *const height_arg = argv[3];
  const char *const infile_arg = argv[4];
  const char *const outfile_arg = argv[5];
  exec_name = argv[0];

  if (argc != 6) die("Invalid number of arguments.");

  encoder = get_vpx_encoder_by_name(codec_arg);
  if (!encoder) die("Unsupported codec.");

  w = strtol(width_arg, NULL, 0);
  h = strtol(height_arg, NULL, 0);

  if (w <= 0 || h <= 0 || (w % 2) != 0 || (h % 2) != 0)
    die("Invalid frame size: %dx%d", w, h);

  if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, w, h, 1))
    die("Failed to allocate image", w, h);

  printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));

  // Configuration
  res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
  if (res) die_codec(&codec, "Failed to get default codec config.");

  cfg.g_w = w;
  cfg.g_h = h;
  cfg.g_timebase.num = 1;
  cfg.g_timebase.den = fps;
  cfg.rc_target_bitrate = bitrate;

  if (!(infile = fopen(infile_arg, "rb")))
    die("Failed to open %s for reading", infile_arg);

  // Pass 0
  cfg.g_pass = VPX_RC_FIRST_PASS;
  stats = pass0(&raw, infile, encoder, &cfg);

  // Pass 1
  rewind(infile);
  cfg.g_pass = VPX_RC_LAST_PASS;
  cfg.rc_twopass_stats_in = stats;
  pass1(&raw, infile, outfile_arg, encoder, &cfg);
  free(stats.buf);

  vpx_img_free(&raw);
  fclose(infile);

  return EXIT_SUCCESS;
}
Example #24 (score: 0)
int main(int argc, char **argv) {
    FILE                *infile, *outfile;
    char infile_path[] = "vsamples/HIMOTION4_3_30_VGA.yuv";
    char outfile_path[] = "vsamples/ivf_test1.ivf";
    vpx_codec_ctx_t      codec;
    vpx_codec_enc_cfg_t  cfg;
    int                  frame_cnt = 0;
    vpx_image_t          raw;
    vpx_codec_err_t      res;
    long                 width=640;
    long                 height=480;
    int                  frame_avail;
    int                  got_data;
    int                  flags = 0;
    
    /* Open files */
    //if(argc!=5)
    //    die("Usage: %s <width> <height> <infile> <outfile>\n", argv[0]);
    //width = strtol(argv[1], NULL, 0);
    //height = strtol(argv[2], NULL, 0);
    if(width < 16 || width%2 || height <16 || height%2)
        die("Invalid resolution: %ldx%ld", width, height);
    if(!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, width, height, 1))
        die("Faile to allocate image", width, height);
    if(!(outfile = fopen(outfile_path, "wb")))
        die("Failed to open %s for writing", outfile_path);
    
    printf("Using %s\n",vpx_codec_iface_name(interface));
    
    /* Populate encoder configuration */                                      //
    res = vpx_codec_enc_config_default(interface, &cfg, 0);                   //
    if(res) {                                                                 //
        printf("Failed to get config: %s\n", vpx_codec_err_to_string(res));   //
        return EXIT_FAILURE;                                                  //
    }                                                                         //
    
    /* Update the default configuration with our settings */                  //                          //
    cfg.g_w = width;                                                          //
    cfg.g_h = height;
    cfg.rc_target_bitrate = 800;
    cfg.rc_buf_sz = 300;
    cfg.rc_overshoot_pct = 1000;
    cfg.g_pass = VPX_RC_ONE_PASS;

    
    write_ivf_file_header(outfile, &cfg, 0);
    
    
    /* Open input file for this encoding pass */
    if(!(infile = fopen(infile_path, "rb")))
        die("Failed to open %s for reading", infile_path);
    
    /* Initialize codec */                                                //
    if(vpx_codec_enc_init(&codec, interface, &cfg, 0))                    //
        die_codec(&codec, "Failed to initialize encoder");                //
    
    frame_avail = 1;
    got_data = 0;
    while(frame_avail || got_data) {
        vpx_codec_iter_t iter = NULL;
        const vpx_codec_cx_pkt_t *pkt;
        
        frame_avail = read_frame(infile, &raw);                           //
        if(vpx_codec_encode(&codec, frame_avail? &raw : NULL, frame_cnt,  //
                            1, flags, VPX_DL_REALTIME))                   //
            die_codec(&codec, "Failed to encode frame");                  //
        got_data = 0;
        while( (pkt = vpx_codec_get_cx_data(&codec, &iter)) ) {
            got_data = 1;
            switch(pkt->kind) {
                case VPX_CODEC_CX_FRAME_PKT:                                  //
                    write_ivf_frame_header(outfile, pkt);                     //
                    (void) fwrite(pkt->data.frame.buf, 1, pkt->data.frame.sz, //
                                  outfile);                                   //
                    break;                                                    //
                default:
                    break;
            }
            printf(pkt->kind == VPX_CODEC_CX_FRAME_PKT
                   && (pkt->data.frame.flags & VPX_FRAME_IS_KEY)? "K":".");
            fflush(stdout);
        }
        frame_cnt++;
    }
    printf("\n");
    fclose(infile);
    
    printf("Processed %d frames.\n",frame_cnt-1);
    vpx_img_free(&raw);                                                       //
    if(vpx_codec_destroy(&codec))                                             //
        die_codec(&codec, "Failed to destroy codec");                         //
    
    /* Try to rewrite the file header with the actual frame count */
    if(!fseek(outfile, 0, SEEK_SET))
        write_ivf_file_header(outfile, &cfg, frame_cnt-1);
    fclose(outfile);
    return EXIT_SUCCESS;
}
Example #25 (score: 0)
void cs_do(CSSession *cs)
{
    /* Codec session should always be protected by call mutex so no need to check for cs validity
     */

    if (!cs) return;

    Payload *p;
    int rc;

    int success = 0;

    pthread_mutex_lock(cs->queue_mutex);
    RTPMessage *msg;

    while ((msg = jbuf_read(cs->j_buf, &success)) || success == 2) {
        pthread_mutex_unlock(cs->queue_mutex);

        uint16_t fsize = ((cs->audio_decoder_sample_rate * cs->audio_decoder_frame_duration) / 1000);
        int16_t tmp[fsize * cs->audio_decoder_channels];

        if (success == 2) {
            rc = opus_decode(cs->audio_decoder, 0, 0, tmp, fsize, 1);
        } else {
            rc = opus_decode(cs->audio_decoder, msg->data, msg->length, tmp, fsize, 0);
            rtp_free_msg(NULL, msg);
        }

        if (rc < 0) {
            LOGGER_WARNING("Decoding error: %s", opus_strerror(rc));
        } else if (cs->acb.first) {
            /* Play */
            cs->acb.first(cs->agent, cs->call_idx, tmp, rc, cs->acb.second);
        }

        pthread_mutex_lock(cs->queue_mutex);
    }

    if (cs->vbuf_raw && !buffer_empty(cs->vbuf_raw)) {
        /* Decode video */
        buffer_read(cs->vbuf_raw, &p);

        /* Leave space for (possibly) other thread to queue more data after we read it here */
        pthread_mutex_unlock(cs->queue_mutex);

        rc = vpx_codec_decode(&cs->v_decoder, p->data, p->size, NULL, MAX_DECODE_TIME_US);
        free(p);

        if (rc != VPX_CODEC_OK) {
            LOGGER_ERROR("Error decoding video: %s", vpx_codec_err_to_string(rc));
        } else {
            vpx_codec_iter_t iter = NULL;
            vpx_image_t *dest = vpx_codec_get_frame(&cs->v_decoder, &iter);

            /* Play decoded images */
            for (; dest; dest = vpx_codec_get_frame(&cs->v_decoder, &iter)) {
                if (cs->vcb.first)
                    cs->vcb.first(cs->agent, cs->call_idx, dest, cs->vcb.second);

                vpx_img_free(dest);
            }
        }

        return;
    }

    pthread_mutex_unlock(cs->queue_mutex);
}
Example #26 (score: 0)
void krad_vpx_decoder_destroy (krad_vpx_decoder_t *kradvpx) {

	vpx_codec_destroy (&kradvpx->decoder);
	vpx_img_free (kradvpx->img);
	free (kradvpx);
}
Example #27 (score: 0)
int main(int argc, char **argv) {
  FILE *infile = NULL;
  vpx_codec_ctx_t codec;
  vpx_codec_enc_cfg_t cfg;
  int frame_count = 0;
  vpx_image_t raw;
  vpx_codec_err_t res;
  VpxVideoInfo info;
  VpxVideoWriter *writer = NULL;
  const VpxInterface *encoder = NULL;
  const int fps = 2;        // TODO(dkovalev) add command line argument
  const double bits_per_pixel_per_frame = 0.067;

  exec_name = argv[0];
  if (argc != 6)
    die("Invalid number of arguments");

  memset(&info, 0, sizeof(info));

  encoder = get_vpx_encoder_by_name(argv[1]);
  if (encoder == NULL) {
    die("Unsupported codec.");
  }
  assert(encoder != NULL);
  info.codec_fourcc = encoder->fourcc;
  info.frame_width = strtol(argv[2], NULL, 0);
  info.frame_height = strtol(argv[3], NULL, 0);
  info.time_base.numerator = 1;
  info.time_base.denominator = fps;

  if (info.frame_width <= 0 ||
      info.frame_height <= 0 ||
      (info.frame_width % 2) != 0 ||
      (info.frame_height % 2) != 0) {
    die("Invalid frame size: %dx%d", info.frame_width, info.frame_height);
  }

  if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, info.frame_width,
                                             info.frame_height, 1)) {
    die("Failed to allocate image.");
  }

  printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));

  res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
  if (res)
    die_codec(&codec, "Failed to get default codec config.");

  cfg.g_w = info.frame_width;
  cfg.g_h = info.frame_height;
  cfg.g_timebase.num = info.time_base.numerator;
  cfg.g_timebase.den = info.time_base.denominator;
  cfg.rc_target_bitrate = (unsigned int)(bits_per_pixel_per_frame * cfg.g_w *
                                         cfg.g_h * fps / 1000);
  cfg.g_lag_in_frames = 0;

  writer = vpx_video_writer_open(argv[5], kContainerIVF, &info);
  if (!writer)
    die("Failed to open %s for writing.", argv[5]);

  if (!(infile = fopen(argv[4], "rb")))
    die("Failed to open %s for reading.", argv[4]);

  if (vpx_codec_enc_init(&codec, encoder->codec_interface(), &cfg, 0))
    die_codec(&codec, "Failed to initialize encoder");

  // Encode frames.
  while (vpx_img_read(&raw, infile)) {
    ++frame_count;

    if (frame_count == 22 && encoder->fourcc == VP8_FOURCC) {
      set_roi_map(&cfg, &codec);
    } else if (frame_count == 33) {
      set_active_map(&cfg, &codec);
    } else if (frame_count == 44) {
      unset_active_map(&cfg, &codec);
    }

    encode_frame(&codec, &raw, frame_count, writer);
  }

  // Flush encoder.
  while (encode_frame(&codec, NULL, -1, writer)) {}

  printf("\n");
  fclose(infile);
  printf("Processed %d frames.\n", frame_count);

  vpx_img_free(&raw);
  if (vpx_codec_destroy(&codec))
    die_codec(&codec, "Failed to destroy codec.");

  vpx_video_writer_close(writer);

  return EXIT_SUCCESS;
}
Example #28 (score: 0)
void vp8Encoder::destory()
{
    LOGD("vp8Encoder::destory()");
    vpx_codec_destroy(&encoder);
    vpx_img_free(&raw);
}
Example #29 (score: 0)
int main(int argc, char **argv) {
    FILE                *infile, *outfile;
    vpx_codec_ctx_t      codec;
    vpx_codec_enc_cfg_t  cfg;
    int                  frame_cnt = 0;
    vpx_image_t          raw;
    vpx_codec_err_t      res;
    long                 width;
    long                 height;
    int                  frame_avail;
    int                  got_data;
    int                  flags = 0;
    int                  update_frame_num = 0;

    /* Open files */
    if(argc!=6)
        die("Usage: %s <width> <height> <infile> <outfile> <frame>\n",
            argv[0]);

        update_frame_num = atoi(argv[5]);
        if(!update_frame_num)
            die("Couldn't parse frame number '%s'\n", argv[5]);

    width = strtol(argv[1], NULL, 0);
    height = strtol(argv[2], NULL, 0);
    if(width < 16 || width%2 || height <16 || height%2)
        die("Invalid resolution: %ldx%ld", width, height);
    if(!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, width, height, 1))
        die("Faile to allocate image", width, height);
    if(!(outfile = fopen(argv[4], "wb")))
        die("Failed to open %s for writing", argv[4]);

    printf("Using %s\n",vpx_codec_iface_name(interface));

    /* Populate encoder configuration */
    res = vpx_codec_enc_config_default(interface, &cfg, 0);
    if(res) {
        printf("Failed to get config: %s\n", vpx_codec_err_to_string(res));
        return EXIT_FAILURE;
    }

    /* Update the default configuration with our settings */
    cfg.rc_target_bitrate = width * height * cfg.rc_target_bitrate
                            / cfg.g_w / cfg.g_h;
    cfg.g_w = width;
    cfg.g_h = height;

    write_ivf_file_header(outfile, &cfg, 0);


        /* Open input file for this encoding pass */
        if(!(infile = fopen(argv[3], "rb")))
            die("Failed to open %s for reading", argv[3]);

        /* Initialize codec */
        if(vpx_codec_enc_init(&codec, interface, &cfg, 0))
            die_codec(&codec, "Failed to initialize encoder");

        frame_avail = 1;
        got_data = 0;
        while(frame_avail || got_data) {
            vpx_codec_iter_t iter = NULL;
            const vpx_codec_cx_pkt_t *pkt;

            frame_avail = read_frame(infile, &raw);

            if(frame_cnt + 1 == update_frame_num) {
                vpx_ref_frame_t ref;

                ref.frame_type = VP8_LAST_FRAME;
                ref.img        = raw;

                if(vpx_codec_control(&codec, VP8_SET_REFERENCE, &ref))
                    die_codec(&codec, "Failed to set reference frame");
            }

            if(vpx_codec_encode(&codec, frame_avail? &raw : NULL, frame_cnt,
                                1, flags, VPX_DL_REALTIME))
                die_codec(&codec, "Failed to encode frame");
            got_data = 0;
            while( (pkt = vpx_codec_get_cx_data(&codec, &iter)) ) {
                got_data = 1;
                switch(pkt->kind) {
                case VPX_CODEC_CX_FRAME_PKT:
                    write_ivf_frame_header(outfile, pkt);
                    (void) fwrite(pkt->data.frame.buf, 1, pkt->data.frame.sz,
                                  outfile);
                    break;
                default:
                    break;
                }
                printf(pkt->kind == VPX_CODEC_CX_FRAME_PKT
                       && (pkt->data.frame.flags & VPX_FRAME_IS_KEY)? "K":".");
                fflush(stdout);
            }
            frame_cnt++;
        }
        printf("\n");
        fclose(infile);

    printf("Processed %d frames.\n",frame_cnt-1);
    vpx_img_free(&raw);
    if(vpx_codec_destroy(&codec))
        die_codec(&codec, "Failed to destroy codec");

    /* Try to rewrite the file header with the actual frame count */
    if(!fseek(outfile, 0, SEEK_SET))
        write_ivf_file_header(outfile, &cfg, frame_cnt-1);
    fclose(outfile);
    return EXIT_SUCCESS;
}
Example #30 (score: -2)
int main(int argc, char **argv) {
  FILE *infile = NULL;
  vpx_codec_ctx_t codec;
  vpx_codec_enc_cfg_t cfg;
  int frame_count = 0;
  vpx_image_t raw;
  vpx_codec_err_t res;
  VpxVideoInfo info;
  VpxVideoWriter *writer = NULL;
  const VpxInterface *encoder = NULL;
  int update_frame_num = 0;
  const int fps = 30;       // TODO(dkovalev) add command line argument
  const int bitrate = 200;  // kbit/s TODO(dkovalev) add command line argument

  vp8_zero(codec);
  vp8_zero(cfg);
  vp8_zero(info);

  exec_name = argv[0];

  if (argc != 6) die("Invalid number of arguments");

  // TODO(dkovalev): add vp9 support and rename the file accordingly
  encoder = get_vpx_encoder_by_name("vp8");
  if (!encoder) die("Unsupported codec.");

  update_frame_num = atoi(argv[5]);
  if (!update_frame_num) die("Couldn't parse frame number '%s'\n", argv[5]);

  info.codec_fourcc = encoder->fourcc;
  info.frame_width = (int)strtol(argv[1], NULL, 0);
  info.frame_height = (int)strtol(argv[2], NULL, 0);
  info.time_base.numerator = 1;
  info.time_base.denominator = fps;

  if (info.frame_width <= 0 || info.frame_height <= 0 ||
      (info.frame_width % 2) != 0 || (info.frame_height % 2) != 0) {
    die("Invalid frame size: %dx%d", info.frame_width, info.frame_height);
  }

  if (!vpx_img_alloc(&raw, VPX_IMG_FMT_I420, info.frame_width,
                     info.frame_height, 1)) {
    die("Failed to allocate image.");
  }

  printf("Using %s\n", vpx_codec_iface_name(encoder->codec_interface()));

  res = vpx_codec_enc_config_default(encoder->codec_interface(), &cfg, 0);
  if (res) die_codec(&codec, "Failed to get default codec config.");

  cfg.g_w = info.frame_width;
  cfg.g_h = info.frame_height;
  cfg.g_timebase.num = info.time_base.numerator;
  cfg.g_timebase.den = info.time_base.denominator;
  cfg.rc_target_bitrate = bitrate;

  writer = vpx_video_writer_open(argv[4], kContainerIVF, &info);
  if (!writer) die("Failed to open %s for writing.", argv[4]);

  if (!(infile = fopen(argv[3], "rb")))
    die("Failed to open %s for reading.", argv[3]);

  if (vpx_codec_enc_init(&codec, encoder->codec_interface(), &cfg, 0))
    die_codec(&codec, "Failed to initialize encoder");

  // Encode frames.
  while (vpx_img_read(&raw, infile)) {
    if (frame_count + 1 == update_frame_num) {
      vpx_ref_frame_t ref;
      ref.frame_type = VP8_LAST_FRAME;
      ref.img = raw;
      if (vpx_codec_control(&codec, VP8_SET_REFERENCE, &ref))
        die_codec(&codec, "Failed to set reference frame");
    }

    encode_frame(&codec, &raw, frame_count++, writer);
  }

  // Flush encoder.
  while (encode_frame(&codec, NULL, -1, writer)) {
  }

  printf("\n");
  fclose(infile);
  printf("Processed %d frames.\n", frame_count);

  vpx_img_free(&raw);
  if (vpx_codec_destroy(&codec)) die_codec(&codec, "Failed to destroy codec.");

  vpx_video_writer_close(writer);

  return EXIT_SUCCESS;
}