Example #1
  virtual void InitEncode (const GMPVideoCodec& codecSettings,
                           const uint8_t* aCodecSpecific,
                           uint32_t aCodecSpecificSize,
                           GMPVideoEncoderCallback* callback,
                           int32_t numberOfCores,
                           uint32_t maxPayloadSize) {
    callback_ = callback;

    GMPErr err = g_platform_api->createthread (&worker_thread_);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't create new thread");
      Error (GMPGenericErr);
      return;
    }

    int rv = WelsCreateSVCEncoder (&encoder_);
    if (rv) {
      Error (GMPGenericErr);
      return;
    }

    SEncParamBase param;
    memset (&param, 0, sizeof (param));

    GMPLOG (GL_INFO, "Initializing encoder at "
            << codecSettings.mWidth
            << "x"
            << codecSettings.mHeight
            << "@"
            << static_cast<int> (codecSettings.mMaxFramerate)
            << "max payload size="
            << maxPayloadSize);

    // Translate parameters.
    param.iUsageType = CAMERA_VIDEO_REAL_TIME;
    param.iPicWidth = codecSettings.mWidth;
    param.iPicHeight = codecSettings.mHeight;
    param.iTargetBitrate = codecSettings.mStartBitrate * 1000;
    GMPLOG (GL_INFO, "Initializing Bit Rate at: Start: "
            << codecSettings.mStartBitrate
            << "; Min: "
            << codecSettings.mMinBitrate
            << "; Max: "
            << codecSettings.mMaxBitrate);
    param.iRCMode = RC_BITRATE_MODE;

    // TODO: Scary conversion from unsigned char to float below.
    param.fMaxFrameRate = static_cast<float> (codecSettings.mMaxFramerate);

    rv = encoder_->Initialize (&param);
    if (rv) {
      GMPLOG (GL_ERROR, "Couldn't initialize encoder");
      Error (GMPGenericErr);
      return;
    }

    max_payload_size_ = maxPayloadSize;

    GMPLOG (GL_INFO, "Initialized encoder");
  }
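
What usually follows an initialization like the one above is a per-frame EncodeFrame() call. Below is a minimal, self-contained sketch (not taken from this plugin): EncodeOneFrame is a hypothetical helper, the contiguous I420 buffer layout and stride math are assumptions, and the NAL walk only marks where a real integration would hand data to its packetizer.

#include <cstdint>
#include <cstring>
#include <wels/codec_api.h>

// Sketch: push one contiguous I420 frame through an already-initialized
// ISVCEncoder and walk the resulting NAL units.
static bool EncodeOneFrame(ISVCEncoder* encoder, const uint8_t* i420,
                           int width, int height) {
  SSourcePicture pic;
  memset(&pic, 0, sizeof(pic));
  pic.iColorFormat = videoFormatI420;
  pic.iPicWidth = width;
  pic.iPicHeight = height;
  pic.iStride[0] = width;
  pic.iStride[1] = pic.iStride[2] = width / 2;
  pic.pData[0] = const_cast<uint8_t*>(i420);                   // Y plane
  pic.pData[1] = pic.pData[0] + width * height;                // U plane
  pic.pData[2] = pic.pData[1] + (width / 2) * (height / 2);    // V plane

  SFrameBSInfo info;
  memset(&info, 0, sizeof(info));
  if (encoder->EncodeFrame(&pic, &info) != cmResultSuccess)
    return false;

  // Each layer holds a run of Annex-B NAL units (start codes included);
  // a real integration would pass these buffers to its packetizer.
  for (int layer = 0; layer < info.iLayerNum; ++layer) {
    const SLayerBSInfo& li = info.sLayerInfo[layer];
    const uint8_t* nal = li.pBsBuf;
    for (int i = 0; i < li.iNalCount; ++i) {
      nal += li.pNalLengthInByte[i];
    }
  }
  return info.eFrameType != videoFrameTypeSkip;
}
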
Example #2
MSOpenH264Encoder::MSOpenH264Encoder(MSFilter *f)
	: mFilter(f), mPacker(0), mPacketisationMode(0), mVConfList(openh264_conf_list), mFrameCount(0), mInitialized(false)
{
	mVConf = ms_video_find_best_configuration_for_bitrate(mVConfList, 384000, ms_get_cpu_count());

	long ret = WelsCreateSVCEncoder(&mEncoder);
	if (ret != 0) {
		ms_error("OpenH264 encoder: Failed to create encoder: %li", ret);
	}
}
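
The constructor above only logs when WelsCreateSVCEncoder fails and leaves teardown to code that is not shown. A hedged sketch of the usual cleanup for an encoder created this way; destroy_encoder is a hypothetical helper, not part of the plugin:

#include <wels/codec_api.h>

// Sketch: release an encoder obtained from WelsCreateSVCEncoder().
static void destroy_encoder(ISVCEncoder*& enc) {
  if (enc != nullptr) {
    enc->Uninitialize();
    WelsDestroySVCEncoder(enc);
    enc = nullptr;
  }
}
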
Example #3
//start the encoder
bool edk::codecs::EncoderH264::startEncoder(edk::size2ui32 size, edk::uint32 fps){
    if(edk::codecs::EncoderVideo::startEncoder(size, fps)){
        //start the encoder
#ifdef EDK_USE_OPENH264
        if(!WelsCreateSVCEncoder(&this->encoder)){
            if(this->encoder){
                //
                //start the parameters
                this->param.iUsageType = CAMERA_VIDEO_REAL_TIME;
                this->param.iPicWidth = size.width;
                this->param.iPicHeight = size.height;
                this->param.iTargetBitrate = rand() + 1; // any non-zero placeholder; clamped just below
                // Force a bitrate of at least w*h/50, otherwise we will only get skipped frames
                this->param.iTargetBitrate = WELS_CLIP3 (this->param.iTargetBitrate,
                                                         this->param.iPicWidth * this->param.iPicHeight / 50, 100000000);
                int32_t iLevelMaxBitrate = WelsCommon::g_ksLevelLimits[LEVEL_5_0 - 1].uiMaxBR * CpbBrNalFactor;
                if (this->param.iTargetBitrate > iLevelMaxBitrate)
                    this->param.iTargetBitrate = iLevelMaxBitrate;

                this->param.iRCMode = RC_BITRATE_MODE;
                //use the FPS
                this->param.fMaxFrameRate = fps; // must be non-zero

                //start the encoder
                if(!encoder->Initialize(&param)){
                    this->setNextkeyframe();
                    return true;
                }
            }
        }
#else
        edk_printDebug("You must define EDK_USE_OPENH264 before use");
#endif
        //finish encoder
        edk::codecs::EncoderVideo::finishEncoder();
    }
    return false;
}
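
The clamp above combines OpenH264's WELS_CLIP3 macro with a level-5.0 ceiling taken from the library's internal tables, so any non-zero starting bitrate ends up between width*height/50 and that cap. A minimal standalone sketch of the same floor/ceiling arithmetic (clamp_bitrate is a hypothetical helper; the 100000000 cap is the literal upper bound used above, not the level limit):

#include <algorithm>
#include <cstdint>
#include <iostream>

// Sketch: keep the target bitrate between width*height/50 and a fixed cap,
// mirroring the WELS_CLIP3 call above.
static int32_t clamp_bitrate(int32_t target, int32_t width, int32_t height,
                             int32_t cap = 100000000) {
  const int32_t floor_bps = width * height / 50;
  return std::min(std::max(target, floor_bps), cap);
}

int main() {
  // For 640x480 the floor is 640*480/50 = 6144, so a tiny target is raised.
  std::cout << clamp_bitrate(1, 640, 480) << "\n";   // prints 6144
  return 0;
}
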
Example #4
  virtual void InitEncode (const GMPVideoCodec& codecSettings,
                           const uint8_t* aCodecSpecific,
                           uint32_t aCodecSpecificSize,
                           GMPVideoEncoderCallback* callback,
                           int32_t numberOfCores,
                           uint32_t maxPayloadSize) {
    callback_ = callback;

    GMPErr err = g_platform_api->createthread (&worker_thread_);
    if (err != GMPNoErr) {
      GMPLOG (GL_ERROR, "Couldn't create new thread");
      Error (GMPGenericErr);
      return;
    }

    int rv = WelsCreateSVCEncoder (&encoder_);
    if (rv) {
      Error (GMPGenericErr);
      return;
    }
    SEncParamExt param;
    memset (&param, 0, sizeof (param));
    encoder_->GetDefaultParams (&param);

    GMPLOG (GL_INFO, "Initializing encoder at "
            << codecSettings.mWidth
            << "x"
            << codecSettings.mHeight
            << "@"
            << static_cast<int> (codecSettings.mMaxFramerate));

    // Translate parameters.
    param.iUsageType = CAMERA_VIDEO_REAL_TIME;
    if(codecSettings.mMode == kGMPScreensharing)
      param.iUsageType = SCREEN_CONTENT_REAL_TIME;
    param.iPicWidth = codecSettings.mWidth;
    param.iPicHeight = codecSettings.mHeight;
    param.iRCMode = RC_BITRATE_MODE;
    param.iTargetBitrate = codecSettings.mStartBitrate * 1000;
    param.iMaxBitrate = codecSettings.mMaxBitrate * 1000;
    GMPLOG (GL_INFO, "Initializing Bit Rate at: Start: "
            << codecSettings.mStartBitrate
            << "; Min: "
            << codecSettings.mMinBitrate
            << "; Max: "
            << codecSettings.mMaxBitrate
            << "; Max payload size:"
            << maxPayloadSize);

    param.uiMaxNalSize = maxPayloadSize;

    // TODO: Scary conversion from unsigned char to float below.
    param.fMaxFrameRate = static_cast<float> (codecSettings.mMaxFramerate);

    // Set up layers. Currently we have one layer.
    SSpatialLayerConfig* layer = &param.sSpatialLayers[0];

    // Make sure the output resolution doesn't exceed the Openh264 capability
    double width_mb = std::ceil(codecSettings.mWidth/16.0);
    double height_mb = std::ceil(codecSettings.mHeight/16.0);
    double input_mb = width_mb * height_mb;
    if (static_cast<uint32_t>(input_mb) > OPENH264_MAX_MB) {
      double scale = std::sqrt(OPENH264_MAX_MB / input_mb);
      layer->iVideoWidth = static_cast<uint32_t>(width_mb * 16 * scale);
      layer->iVideoHeight = static_cast<uint32_t>(height_mb * 16 * scale);
      GMPLOG (GL_INFO, "InitEncode: the output resolution overflows, w x h = " << codecSettings.mWidth << " x " << codecSettings.mHeight
              << ", turned to be " << layer->iVideoWidth << " x " << layer->iVideoHeight);
    } else {
      layer->iVideoWidth = codecSettings.mWidth;
      layer->iVideoHeight = codecSettings.mHeight;
    }
    if (layer->iVideoWidth < 16) {
      layer->iVideoWidth = 16;
    }
    if (layer->iVideoHeight < 16) {
      layer->iVideoHeight = 16;
    }

    layer->fFrameRate = param.fMaxFrameRate;
    layer->iSpatialBitrate = param.iTargetBitrate;
    layer->iMaxSpatialBitrate = param.iMaxBitrate;

    //for controlling the NAL size (normally for packetization-mode=0)
    if (maxPayloadSize != 0) {
      layer->sSliceArgument.uiSliceMode = SM_SIZELIMITED_SLICE;
      layer->sSliceArgument.uiSliceSizeConstraint = maxPayloadSize;
    }
    rv = encoder_->InitializeExt (&param);
    if (rv) {
      GMPLOG (GL_ERROR, "Couldn't initialize encoder");
      Error (GMPGenericErr);
      return;
    }
    max_payload_size_ = maxPayloadSize;
    GMPLOG (GL_INFO, "Initialized encoder");
  }
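
The resolution cap in this variant works in macroblock units: both dimensions are rounded up to whole 16x16 macroblocks, and if the product exceeds OPENH264_MAX_MB the frame is scaled down uniformly. A standalone sketch of that arithmetic; the 36864 value here is an assumption (the H.264 level-5.2 frame-size limit), not necessarily what OPENH264_MAX_MB is defined to in this plugin:

#include <cmath>
#include <cstdint>
#include <iostream>

// Assumed limit; the real OPENH264_MAX_MB constant may differ.
static const uint32_t kMaxMb = 36864;

// Sketch: reproduce the macroblock-based down-scale above.
static void cap_resolution(uint32_t w, uint32_t h) {
  double width_mb = std::ceil(w / 16.0);
  double height_mb = std::ceil(h / 16.0);
  double input_mb = width_mb * height_mb;
  if (static_cast<uint32_t>(input_mb) > kMaxMb) {
    double scale = std::sqrt(kMaxMb / input_mb);
    w = static_cast<uint32_t>(width_mb * 16 * scale);
    h = static_cast<uint32_t>(height_mb * 16 * scale);
  }
  std::cout << w << "x" << h << "\n";
}

int main() {
  cap_resolution(1920, 1080);   // 8160 MBs, under the limit: unchanged
  cap_resolution(8192, 4320);   // 138240 MBs, over the limit: scaled down
  return 0;
}
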
Example #5
static BOOL openh264_init(H264_CONTEXT* h264)
{
	long status;
	SDecodingParam sDecParam;
	H264_CONTEXT_OPENH264* sys;
	static int traceLevel = WELS_LOG_DEBUG;
	static EVideoFormatType videoFormat = videoFormatI420;
	static WelsTraceCallback traceCallback = (WelsTraceCallback) openh264_trace_callback;

	sys = (H264_CONTEXT_OPENH264*) calloc(1, sizeof(H264_CONTEXT_OPENH264));

	if (!sys)
	{
		goto EXCEPTION;
	}

	h264->pSystemData = (void*) sys;

	if (h264->Compressor)
	{
		WelsCreateSVCEncoder(&sys->pEncoder);

		if (!sys->pEncoder)
		{
			WLog_ERR(TAG, "Failed to create OpenH264 encoder");
			goto EXCEPTION;
		}
	}
	else
	{
		WelsCreateDecoder(&sys->pDecoder);

		if (!sys->pDecoder)
		{
			WLog_ERR(TAG, "Failed to create OpenH264 decoder");
			goto EXCEPTION;
		}

		ZeroMemory(&sDecParam, sizeof(sDecParam));
		sDecParam.eOutputColorFormat  = videoFormatI420;
		sDecParam.eEcActiveIdc = ERROR_CON_FRAME_COPY;
		sDecParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;

		status = (*sys->pDecoder)->Initialize(sys->pDecoder, &sDecParam);

		if (status != 0)
		{
			WLog_ERR(TAG, "Failed to initialize OpenH264 decoder (status=%ld)", status);
			goto EXCEPTION;
		}

		status = (*sys->pDecoder)->SetOption(sys->pDecoder, DECODER_OPTION_DATAFORMAT, &videoFormat);

		if (status != 0)
		{
			WLog_ERR(TAG, "Failed to set data format option on OpenH264 decoder (status=%ld)", status);
		}

		if (g_openh264_trace_enabled)
		{
			status = (*sys->pDecoder)->SetOption(sys->pDecoder, DECODER_OPTION_TRACE_LEVEL, &traceLevel);

			if (status != 0)
			{
				WLog_ERR(TAG, "Failed to set trace level option on OpenH264 decoder (status=%ld)", status);
			}

			status = (*sys->pDecoder)->SetOption(sys->pDecoder, DECODER_OPTION_TRACE_CALLBACK, &traceCallback);

			if (status != 0)
			{
				WLog_ERR(TAG, "Failed to set trace callback option on OpenH264 decoder (status=%ld)", status);
			}

			status = (*sys->pDecoder)->SetOption(sys->pDecoder, DECODER_OPTION_TRACE_CALLBACK_CONTEXT, &h264);

			if (status != 0)
			{
				WLog_ERR(TAG, "Failed to set trace callback context option on OpenH264 decoder (status=%ld)", status);
			}
		}
	}

	return TRUE;

EXCEPTION:
	openh264_uninit(h264);

	return FALSE;
}
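
For the decoder branch above (Compressor == FALSE), the per-packet step that follows is a DecodeFrame2 call. A hedged sketch is below; it uses OpenH264's C++ ISVCDecoder interface rather than FreeRDP's C-style vtable calls, and DecodeOne, src, and srcLen are illustrative names rather than anything from this codebase:

#include <cstdint>
#include <cstring>
#include <wels/codec_api.h>

// Sketch: decode one Annex-B chunk with an already-initialized decoder.
// Returns true when a complete I420 picture is available.
static bool DecodeOne(ISVCDecoder* decoder, const uint8_t* src, int srcLen) {
  uint8_t* yuv[3] = { nullptr, nullptr, nullptr };
  SBufferInfo info;
  memset(&info, 0, sizeof(info));

  DECODING_STATE state = decoder->DecodeFrame2(src, srcLen, yuv, &info);
  if (state != dsErrorFree)
    return false;

  // iBufferStatus == 1 means yuv[0..2] point at the Y/U/V planes;
  // dimensions and strides are in info.UsrData.sSystemBuffer.
  return info.iBufferStatus == 1;
}
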
Example #6
OpenH264EncoderImpl::OpenH264EncoderImpl() :
		initialized(false), layerIndex(0), nalIndex(0) {
	WelsCreateSVCEncoder(&isvcEncoder);
	isvcEncoder->GetDefaultParams(&param);
}
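
Note that this constructor dereferences isvcEncoder without checking whether WelsCreateSVCEncoder succeeded. A guarded sketch of the same create-then-fetch-defaults step, written as a hypothetical free function:

#include <wels/codec_api.h>

// Sketch: create an encoder and pull its default parameters, checking both
// the return code and the pointer before dereferencing.
static ISVCEncoder* CreateEncoderWithDefaults(SEncParamExt* param) {
  ISVCEncoder* enc = nullptr;
  if (WelsCreateSVCEncoder(&enc) != 0 || enc == nullptr)
    return nullptr;
  enc->GetDefaultParams(param);
  return enc;
}
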
Example #7
static gboolean
gst_openh264enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  GstOpenh264Enc *openh264enc = GST_OPENH264ENC (encoder);
  gchar *debug_caps;
  guint width, height, fps_n, fps_d;
  SEncParamExt enc_params;
  SliceModeEnum slice_mode = SM_SINGLE_SLICE;
  guint n_slices = 1;
  gint ret;
  GstCaps *outcaps;
  GstVideoCodecState *output_state;
  openh264enc->frame_count = 0;
  int video_format = videoFormatI420;

  debug_caps = gst_caps_to_string (state->caps);
  GST_DEBUG_OBJECT (openh264enc, "gst_e26d4_enc_set_format called, caps: %s",
      debug_caps);
  g_free (debug_caps);

  gst_openh264enc_stop (encoder);

  if (openh264enc->input_state) {
    gst_video_codec_state_unref (openh264enc->input_state);
  }
  openh264enc->input_state = gst_video_codec_state_ref (state);

  width = GST_VIDEO_INFO_WIDTH (&state->info);
  height = GST_VIDEO_INFO_HEIGHT (&state->info);
  fps_n = GST_VIDEO_INFO_FPS_N (&state->info);
  fps_d = GST_VIDEO_INFO_FPS_D (&state->info);

  if (openh264enc->encoder != NULL) {
    openh264enc->encoder->Uninitialize ();
    WelsDestroySVCEncoder (openh264enc->encoder);
    openh264enc->encoder = NULL;
  }
  WelsCreateSVCEncoder (&openh264enc->encoder);
  unsigned int uiTraceLevel = WELS_LOG_ERROR;
  openh264enc->encoder->SetOption (ENCODER_OPTION_TRACE_LEVEL, &uiTraceLevel);

  openh264enc->encoder->GetDefaultParams (&enc_params);

  enc_params.iUsageType = openh264enc->usage_type;
  enc_params.iPicWidth = width;
  enc_params.iPicHeight = height;
  enc_params.iTargetBitrate = openh264enc->bitrate;
  enc_params.iMaxBitrate = openh264enc->max_bitrate;
  enc_params.iRCMode = openh264enc->rate_control;
  enc_params.iTemporalLayerNum = 1;
  enc_params.iSpatialLayerNum = 1;
  enc_params.iLtrMarkPeriod = 30;
  enc_params.iMultipleThreadIdc = openh264enc->multi_thread;
  enc_params.bEnableDenoise = openh264enc->enable_denoise;
  enc_params.iComplexityMode = openh264enc->complexity;
  enc_params.uiIntraPeriod = openh264enc->gop_size;
  enc_params.bEnableBackgroundDetection = openh264enc->background_detection;
  enc_params.bEnableAdaptiveQuant = openh264enc->adaptive_quantization;
  enc_params.bEnableSceneChangeDetect = openh264enc->scene_change_detection;
  enc_params.bEnableFrameSkip = openh264enc->enable_frame_skip;
  enc_params.bEnableLongTermReference = 0;
#if OPENH264_MINOR >= 4
  enc_params.eSpsPpsIdStrategy = CONSTANT_ID;
#else
  enc_params.bEnableSpsPpsIdAddition = 0;
#endif
  enc_params.bPrefixNalAddingCtrl = 0;
  enc_params.fMaxFrameRate = fps_n * 1.0 / fps_d;
  enc_params.iLoopFilterDisableIdc = openh264enc->deblocking_mode;
  enc_params.sSpatialLayers[0].uiProfileIdc = PRO_BASELINE;
  enc_params.sSpatialLayers[0].iVideoWidth = enc_params.iPicWidth;
  enc_params.sSpatialLayers[0].iVideoHeight = enc_params.iPicHeight;
  enc_params.sSpatialLayers[0].fFrameRate = fps_n * 1.0 / fps_d;
  enc_params.sSpatialLayers[0].iSpatialBitrate = enc_params.iTargetBitrate;
  enc_params.sSpatialLayers[0].iMaxSpatialBitrate = enc_params.iMaxBitrate;

  if (openh264enc->slice_mode == GST_OPENH264_SLICE_MODE_N_SLICES) {
    if (openh264enc->num_slices == 1)
      slice_mode = SM_SINGLE_SLICE;
    else
      slice_mode = SM_FIXEDSLCNUM_SLICE;
    n_slices = openh264enc->num_slices;
  } else if (openh264enc->slice_mode == GST_OPENH264_SLICE_MODE_AUTO) {
#if OPENH264_MAJOR == 1 && OPENH264_MINOR < 6
    slice_mode = SM_AUTO_SLICE;
#else
    slice_mode = SM_FIXEDSLCNUM_SLICE;
    n_slices = 0;
#endif
  } else {
    GST_ERROR_OBJECT (openh264enc, "unexpected slice mode %d",
        openh264enc->slice_mode);
    slice_mode = SM_SINGLE_SLICE;
  }

#if OPENH264_MAJOR == 1 && OPENH264_MINOR < 6
  enc_params.sSpatialLayers[0].sSliceCfg.uiSliceMode = slice_mode;
  enc_params.sSpatialLayers[0].sSliceCfg.sSliceArgument.uiSliceNum = n_slices;
#else
  enc_params.sSpatialLayers[0].sSliceArgument.uiSliceMode = slice_mode;
  enc_params.sSpatialLayers[0].sSliceArgument.uiSliceNum = n_slices;
#endif

  openh264enc->framerate = (1 + fps_n / fps_d);

  ret = openh264enc->encoder->InitializeExt (&enc_params);

  if (ret != cmResultSuccess) {
    GST_ERROR_OBJECT (openh264enc, "failed to initialize encoder");
    return FALSE;
  }

  openh264enc->encoder->SetOption (ENCODER_OPTION_DATAFORMAT, &video_format);

  outcaps =
      gst_caps_copy (gst_static_pad_template_get_caps
      (&gst_openh264enc_src_template));

  output_state = gst_video_encoder_set_output_state (encoder, outcaps, state);
  gst_video_codec_state_unref (output_state);

  return gst_video_encoder_negotiate (encoder);
}
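
Once an encoder like the one above is running, parameters such as bitrate are usually adjusted through SetOption rather than by re-running InitializeExt. A hedged sketch, assuming the ENCODER_OPTION_BITRATE option and SBitrateInfo struct from the OpenH264 headers; UpdateBitrate is a hypothetical helper, not part of the GStreamer element:

#include <wels/codec_api.h>

// Sketch: change the target bitrate on a running encoder, applying the new
// value to every spatial layer.
static bool UpdateBitrate(ISVCEncoder* encoder, int bitsPerSecond) {
  SBitrateInfo info;
  info.iLayer = SPATIAL_LAYER_ALL;
  info.iBitrate = bitsPerSecond;
  return encoder->SetOption(ENCODER_OPTION_BITRATE, &info) == cmResultSuccess;
}
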