Example #1
int32_t WebrtcOpenH264VideoEncoder::InitEncode(
    const webrtc::VideoCodec* codecSettings,
    int32_t numberOfCores,
    uint32_t maxPayloadSize) {
  max_payload_size_ = maxPayloadSize;

  int rv = CreateSVCEncoder(&encoder_);
  if (rv) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  SVCEncodingParam param;
  memset(&param, 0, sizeof(param));

  MOZ_MTLOG(ML_INFO, "Initializing encoder at "
	    << codecSettings->width
	    << "x"
	    << codecSettings->height
	    << "@"
	    << static_cast<int>(codecSettings->maxFramerate));

  // Translate parameters.
  param.iPicWidth = codecSettings->width;
  param.iPicHeight = codecSettings->height;
  param.iTargetBitrate = codecSettings->startBitrate * 1000;
  param.iTemporalLayerNum = 1;
  param.iSpatialLayerNum = 1;
  // TODO([email protected]). Scary conversion from unsigned char to float below.
  param.fFrameRate = codecSettings->maxFramerate;
  param.iInputCsp = videoFormatI420;

  // Set up layers. Currently we have one layer.
  auto layer = &param.sSpatialLayers[0];

  layer->iVideoWidth = codecSettings->width;
  layer->iVideoHeight = codecSettings->height;
  layer->iQualityLayerNum = 1;
  layer->iSpatialBitrate = param.iTargetBitrate;
  layer->fFrameRate = param.fFrameRate;

  // Based on guidance from Cisco.
  layer->sSliceCfg.sSliceArgument.uiSliceMbNum[0] = 1000;
  layer->sSliceCfg.sSliceArgument.uiSliceNum = 1;
  layer->sSliceCfg.sSliceArgument.uiSliceSizeConstraint = 1000;

  rv = encoder_->Initialize(&param, INIT_TYPE_PARAMETER_BASED);
  if (rv) {
    return WEBRTC_VIDEO_CODEC_MEMORY;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}
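
Example #1 covers only the setup half of the encoder's life cycle. For completeness, here is a minimal sketch of the matching teardown, assuming the OpenH264 C++ interface of the same vintage (Uninitialize() on the ISVCEncoder, then DestroySVCEncoder(); newer releases name the create/destroy pair WelsCreateSVCEncoder/WelsDestroySVCEncoder). Release() follows the WebRTC external-encoder interface; the body is illustrative, not quoted from the Mozilla source.

// Sketch only: teardown matching the InitEncode() above.
// DestroySVCEncoder() is assumed to be the counterpart of CreateSVCEncoder()
// in this OpenH264 snapshot (WelsDestroySVCEncoder in newer releases).
int32_t WebrtcOpenH264VideoEncoder::Release() {
  if (encoder_) {
    encoder_->Uninitialize();      // undo Initialize()
    DestroySVCEncoder(encoder_);   // free the object from CreateSVCEncoder()
    encoder_ = nullptr;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
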
Example #2
JNIEXPORT void JNICALL Java_co_splots_recorder_H264Encoder_init(JNIEnv* env,
		jobject thiz, jfloat frameRate, jint srcWidth, jint srcHeight,
		jobject cropRect, jint destWidth, jint destHeight, jstring outputPath) {
	jclass output_rect_class = (*env)->GetObjectClass(env, cropRect);
	jfieldID field_id = (*env)->GetFieldID(env, output_rect_class, "left", "I");
	crop_x = (*env)->GetIntField(env, cropRect, field_id);
	field_id = (*env)->GetFieldID(env, output_rect_class, "top", "I");
	crop_y = (*env)->GetIntField(env, cropRect, field_id);
	field_id = (*env)->GetFieldID(env, output_rect_class, "right", "I");
	crop_width = (*env)->GetIntField(env, cropRect, field_id) - crop_x;
	field_id = (*env)->GetFieldID(env, output_rect_class, "bottom", "I");
	crop_height = (*env)->GetIntField(env, cropRect, field_id) - crop_y;
	dest_width = (int) destWidth;
	dest_height = (int) destHeight;
	src_width = (int) srcWidth;
	src_height = (int) srcHeight;
	frame_rate = (float) frameRate;
	frame_count = 1;
	frame_rate_sum = 0;

	if (CreateSVCEncoder(&encoder) != cmResultSuccess) {
		throwJavaException(env, "java/io/IOException",
				"Couldn't create encoder.");
		return;
	}
	/* Base (non-SVC) encoder parameters: single layer, I420 input,
	 * fixed target bitrate. */
	SEncParamBase sParam;
	memset(&sParam, 0, sizeof(SEncParamBase));
	sParam.fMaxFrameRate = frame_rate;
	sParam.iInputCsp = videoFormatI420;
	sParam.iPicWidth = dest_width;
	sParam.iPicHeight = dest_height;
	sParam.iTargetBitrate = 5000000;
	sParam.iRCMode = 0; /* 0 == quality-mode rate control in this API vintage */

	if ((*encoder)->Initialize(encoder, &sParam) != cmResultSuccess) {
		throwJavaException(env, "java/io/IOException",
				"Couldn't initialize encoder.");
		return;
	}
	memset(&info, 0, sizeof(SFrameBSInfo));
	const char* output_file_path = (*env)->GetStringUTFChars(env, outputPath,
			NULL);
	outfile = fopen(output_file_path, "wb");
	/* Release the UTF chars before the error check so the string is not
	 * leaked when fopen() fails. */
	(*env)->ReleaseStringUTFChars(env, outputPath, output_file_path);
	if (outfile == NULL) {
		throwJavaException(env, "java/io/IOException",
				"Could not open the output file");
		return;
	}
}
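
The JNI init above leaves the encoder configured but does not show how frames flow through it. Below is a minimal sketch of one encode pass over the same C interface, reusing the file-scope encoder, info, dest_width/dest_height and outfile set up in init. The helper name encode_i420_frame is hypothetical, and the SFrameBSInfo/SLayerBSInfo field names follow the current codec_api.h (older snapshots differ slightly), so treat it as an outline rather than the project's actual encode path.

/* Sketch only: one frame through the encoder initialized above. The helper
 * name is hypothetical; field names assume the current OpenH264 codec_api.h. */
static int encode_i420_frame(unsigned char *i420) {
	SSourcePicture pic;
	int i, j;

	memset(&pic, 0, sizeof(SSourcePicture));
	pic.iColorFormat = videoFormatI420;
	pic.iPicWidth = dest_width;
	pic.iPicHeight = dest_height;
	pic.iStride[0] = dest_width;
	pic.iStride[1] = pic.iStride[2] = dest_width / 2;
	pic.pData[0] = i420;
	pic.pData[1] = pic.pData[0] + dest_width * dest_height;
	pic.pData[2] = pic.pData[1] + dest_width * dest_height / 4;

	if ((*encoder)->EncodeFrame(encoder, &pic, &info) != cmResultSuccess)
		return -1;

	/* Each layer carries one or more NAL units stored back to back in pBsBuf. */
	for (i = 0; i < info.iLayerNum; ++i) {
		const SLayerBSInfo *layer = &info.sLayerInfo[i];
		int layer_size = 0;
		for (j = 0; j < layer->iNalCount; ++j)
			layer_size += layer->pNalLengthInByte[j];
		fwrite(layer->pBsBuf, 1, (size_t) layer_size, outfile);
	}
	return 0;
}
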
Example #3
 virtual void SetUp() {
   int rv = CreateSVCEncoder(&encoder_);
   ASSERT_EQ(0, rv);
   ASSERT_TRUE(encoder_ != NULL);
 }
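
The fixture only shows SetUp(); the matching TearDown() is a one-liner, sketched here on the assumption that this OpenH264 snapshot pairs CreateSVCEncoder() with DestroySVCEncoder() (newer releases use the WelsCreateSVCEncoder/WelsDestroySVCEncoder names).

 // Sketch: matching TearDown() for the fixture above; DestroySVCEncoder() is
 // assumed to be the release counterpart in this snapshot.
 virtual void TearDown() {
   if (encoder_ != NULL) {
     DestroySVCEncoder(encoder_);
     encoder_ = NULL;
   }
 }
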
Example #4
static pj_status_t open_openh264_codec(openh264_private *ff,
                                     pj_mutex_t *ff_mutex)
{
    pjmedia_video_format_detail *vfd;
    pj_bool_t enc_opened = PJ_FALSE, dec_opened = PJ_FALSE;
    pj_status_t status;

    vfd = pjmedia_format_get_video_format_detail(&ff->param.enc_fmt, 
						 PJ_TRUE);

    /* Override generic params or apply specific params before opening
     * the codec.
     */
    if (ff->desc->preopen) {
		status = (*ff->desc->preopen)(ff);
		if (status != PJ_SUCCESS)
			goto on_error;
    }

    /* Open encoder */
    if (ff->param.dir & PJMEDIA_DIR_ENCODING) {
		int err;
		SEncParamExt *param = &ff->enc_param;
		const openh264_codec_desc *desc = &ff->desc[0];
		bool disable = 0;
		int iIndexLayer = 0;
		SSourcePicture *srcPic;

		pj_mutex_lock(ff_mutex);
		memset(param, 0x00, sizeof(SEncParamExt));
		CreateSVCEncoder(&ff->enc);
		
		/* Test for temporal, spatial, SNR scalability */
		param->fMaxFrameRate = (float)vfd->fps.num;		// input frame rate
		param->iPicWidth	= vfd->size.w;		// width of picture in samples
		param->iPicHeight	= vfd->size.h;		// height of picture in samples
		param->iTargetBitrate = desc->avg_bps;		// target bitrate desired
		param->bEnableRc = PJ_TRUE;           //  rc mode control
		param->iTemporalLayerNum = 3;	// layer number at temporal level
		param->iSpatialLayerNum	= 1;	// layer number at spatial level
		param->bEnableDenoise   = PJ_TRUE;    // denoise control
		param->bEnableBackgroundDetection = PJ_TRUE; // background detection control
		param->bEnableAdaptiveQuant       = PJ_TRUE; // adaptive quantization control
		param->bEnableFrameSkip           = PJ_TRUE; // frame skipping
		param->bEnableLongTermReference   = PJ_FALSE; // long term reference control
		param->bEnableFrameCroppingFlag   = PJ_FALSE;
		param->iLoopFilterDisableIdc = 0;

		param->iInputCsp			= videoFormatI420;			// color space of input sequence
		param->uiIntraPeriod		= 300;		// period of Intra frame
		param->bEnableSpsPpsIdAddition = 0;
		param->bPrefixNalAddingCtrl = 0;
		
		param->sSpatialLayers[iIndexLayer].iVideoWidth	= vfd->size.w;
		param->sSpatialLayers[iIndexLayer].iVideoHeight	= vfd->size.h;
		param->sSpatialLayers[iIndexLayer].fFrameRate	= (float)vfd->fps.num;		
		param->sSpatialLayers[iIndexLayer].iSpatialBitrate	= desc->avg_bps;
// 		param->sSpatialLayers[iIndexLayer].iDLayerQp = 50;
 		param->sSpatialLayers[iIndexLayer].uiProfileIdc = 66;
		param->sSpatialLayers[iIndexLayer].sSliceCfg.uiSliceMode = 4;
		param->sSpatialLayers[iIndexLayer].sSliceCfg.sSliceArgument.uiSliceSizeConstraint = PJMEDIA_MAX_VID_PAYLOAD_SIZE;

		err = callWelsEncoderFn(ff->enc)->InitializeExt(ff->enc, param);
		if (err == cmResultSuccess)
		{			
			callWelsEncoderFn(ff->enc)->SetOption(ff->enc, ENCODER_OPTION_ENABLE_SSEI, &disable);
			enc_opened = PJ_TRUE;
		}

		srcPic = malloc(sizeof(SSourcePicture));
		memset(srcPic, 0x00, sizeof(SSourcePicture));
		srcPic->iColorFormat = param->iInputCsp;
		srcPic->iPicWidth = param->iPicWidth;
		srcPic->iPicHeight = param->iPicHeight;
		srcPic->iStride[0] = param->iPicWidth;
		srcPic->iStride[1] = param->iPicWidth / 2;
		srcPic->iStride[2] = param->iPicWidth / 2;

		ff->srcPic = srcPic;
		pj_mutex_unlock(ff_mutex);				
    }

    /* Open decoder */
    if (ff->param.dir & PJMEDIA_DIR_DECODING) {
		SDecodingParam sDecParam = {0};

		pj_mutex_lock(ff_mutex);
		
		CreateDecoder(&ff->dec);
		sDecParam.iOutputColorFormat	= videoFormatI420;
		sDecParam.uiTargetDqLayer	= (unsigned char)-1;
		sDecParam.uiEcActiveFlag	= 1;
		sDecParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
 		callWelsDecoderFn(ff->dec)->Initialize(ff->dec, &sDecParam);

		pj_mutex_unlock(ff_mutex);
		dec_opened = PJ_TRUE;
    }

    /* Let the codec apply specific params after the codec opened */
    if (ff->desc->postopen) {
		status = (*ff->desc->postopen)(ff);
		if (status != PJ_SUCCESS)
			goto on_error;
    }

    return PJ_SUCCESS;

on_error:
    return status;
}
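
open_openh264_codec() has a mirror-image close path in the pjmedia wrapper. The sketch below shows what that teardown typically looks like for the fields populated above (ff->enc, ff->srcPic, ff->dec), assuming the same API vintage (DestroySVCEncoder()/DestroyDecoder(); newer OpenH264 releases prefix these with Wels). The function name and exact structure are inferred, not quoted from the pjsip source.

/* Sketch only: close path mirroring open_openh264_codec() above. The function
 * name is inferred; Destroy* names match the API vintage of this snippet. */
static pj_status_t close_openh264_codec(openh264_private *ff, pj_mutex_t *ff_mutex)
{
    pj_mutex_lock(ff_mutex);

    if (ff->enc) {
        callWelsEncoderFn(ff->enc)->Uninitialize(ff->enc);
        DestroySVCEncoder(ff->enc);
        ff->enc = NULL;
    }
    if (ff->srcPic) {
        free(ff->srcPic);
        ff->srcPic = NULL;
    }
    if (ff->dec) {
        callWelsDecoderFn(ff->dec)->Uninitialize(ff->dec);
        DestroyDecoder(ff->dec);
        ff->dec = NULL;
    }

    pj_mutex_unlock(ff_mutex);
    return PJ_SUCCESS;
}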