Example #1
void *vc_open (int width, int height, double fps)
{
	Ctx *ctx = new Ctx;

	ctx->force_keyframe = 0;

	// Configure encoder parameters
	//x264_param_default(&ctx->param);
	x264_param_default_preset(&ctx->param, "fast", "zerolatency");
	//x264_param_apply_profile(&ctx->param, "baseline");

	ctx->param.i_width = width;
	ctx->param.i_height = height;
	ctx->param.b_repeat_headers = 1;  // repeat SPS/PPS in front of every keyframe
	ctx->param.b_cabac = 1;
	ctx->param.i_threads = 1;

	// ctx->param.b_intra_refresh = 1;

	ctx->param.i_fps_num = (int)fps;
	ctx->param.i_fps_den = 1;
	//ctx->param.b_vfr_input = 1;

	ctx->param.i_keyint_max = ctx->param.i_fps_num * 2;
	//ctx->param.i_keyint_min = 1;

	// rc
	// ctx->param.rc.i_rc_method = X264_RC_CRF;
	ctx->param.rc.i_bitrate = 50;
	//ctx->param.rc.f_rate_tolerance = 0.1;
	//ctx->param.rc.i_vbv_max_bitrate = ctx->param.rc.i_bitrate * 1.3;
	//ctx->param.rc.f_rf_constant = 600;
	//ctx->param.rc.f_rf_constant_max = ctx->param.rc.f_rf_constant * 1.3;

#ifdef DEBUG
	ctx->param.i_log_level = X264_LOG_WARNING;
#else
	ctx->param.i_log_level = X264_LOG_NONE;
#endif // release

	ctx->x264 = x264_encoder_open(&ctx->param);
	if (!ctx->x264) {
		fprintf(stderr, "%s: x264_encoder_open err\n", __func__);
		delete ctx;
		return 0;
	}

	x264_picture_init(&ctx->picture);
	ctx->picture.img.i_csp = X264_CSP_I420;
	ctx->picture.img.i_plane = 3;

	ctx->output = malloc(128*1024);
	ctx->output_bufsize = 128*1024;
	ctx->output_datasize = 0;

	ctx->get_pts = first_pts;
	ctx->info_valid = 0;

	return ctx;
}
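
A note on the b_repeat_headers = 1 setting in the example above: it makes x264 re-emit SPS/PPS in front of every keyframe. When that flag is left at 0, the headers have to be fetched once up front with x264_encoder_headers(); a minimal sketch follows (the enc handle and out file are assumptions, not part of the example):

// Fetch SPS/PPS/SEI once; only needed when b_repeat_headers == 0,
// since the encoder will then not repeat them in front of keyframes.
static int write_stream_headers(x264_t *enc, FILE *out)
{
	x264_nal_t *hdr_nals;
	int n_hdr;

	if (x264_encoder_headers(enc, &hdr_nals, &n_hdr) < 0)
		return -1;
	for (int i = 0; i < n_hdr; i++)
		fwrite(hdr_nals[i].p_payload, 1, hdr_nals[i].i_payload, out);
	return 0;
}
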
Example #2
bool VideoEncoder::initializePic() {

  unsigned int csp = (vflip) ? X264_CSP_I420 | X264_CSP_VFLIP : X264_CSP_I420;
  x264_picture_init(&pic_in);
  pic_in.img.i_csp = csp;
  pic_in.img.i_plane = 3;

  return true;
}
Example #3
void msx264::msx264_pic_init()
{
    pPicIn = new x264_picture_t;
    pPicOut = new x264_picture_t;

    x264_picture_init(pPicOut);

    x264_picture_alloc(pPicIn, X264_CSP_I420, params.i_width,
                       params.i_height);

    pPicIn->img.i_csp = X264_CSP_I420;
    pPicIn->img.i_plane = 3;
}
Example #4
void rtspStream::initH264Encoder(int width,int height,int fps,int bitRate)
{
	frame_num = 0; 
	pX264Handle   = NULL;
	pX264Param = new x264_param_t;
	assert(pX264Param);
	m_nFPS = 25;
	//* Configure parameters
	//* Use the default parameters. Because this is real-time network streaming, the zerolatency tune is used, so there are no delayed_frames; if you are not using zerolatency, you also need to drain the buffered (delayed) frames after encoding finishes.
	x264_param_default_preset(pX264Param, "veryfast", "zerolatency");
	//* cpuFlags
	pX264Param->i_threads  = X264_SYNC_LOOKAHEAD_AUTO;//* guarantees emptied buffers can be reused without deadlock.
	//* 视频选项
	pX264Param->i_width   = width; //* width of the image to encode.
	pX264Param->i_height  = height; //* height of the image to encode.
	pX264Param->i_frame_total = 0; //* total number of frames to encode; use 0 if unknown.
	pX264Param->i_keyint_max = 10; 
	//* Stream parameters
	pX264Param->i_bframe  = 5;
	pX264Param->b_open_gop  = 0;
	pX264Param->i_bframe_pyramid = 0;
	pX264Param->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;
	//* Log parameter; just leave it commented out when encoding info should not be printed
	// pX264Param->i_log_level  = X264_LOG_DEBUG;
	//* Rate control parameters
	pX264Param->rc.i_bitrate = bitRate;//* bitrate (in kbps)
	//* muxing parameters
	pX264Param->i_fps_den  = 1; //* frame rate denominator
	pX264Param->i_fps_num  = fps;//* frame rate numerator
	pX264Param->i_timebase_den = pX264Param->i_fps_num;
	pX264Param->i_timebase_num = pX264Param->i_fps_den;
	//* Set the profile. Use the baseline profile.
	x264_param_apply_profile(pX264Param, x264_profile_names[0]);

	pNals = NULL;
	pPicIn = new x264_picture_t;
	pPicOut = new x264_picture_t;
	x264_picture_init(pPicOut);
	x264_picture_alloc(pPicIn, X264_CSP_I420, pX264Param->i_width, pX264Param->i_height);
	pPicIn->img.i_csp = X264_CSP_I420;
	pPicIn->img.i_plane = 3;
	//* Open the encoder handle. Use x264_encoder_parameters to read back the parameters
	//* actually applied to x264, and x264_encoder_reconfig to update them.
	pX264Handle = x264_encoder_open(pX264Param);
	assert(pX264Handle);

	pPicIn->img.plane[0] = PYUVBuf;
	pPicIn->img.plane[1] = PYUVBuf + width *height;
	pPicIn->img.plane[2] = PYUVBuf + width * height * 5 / 4;
	pPicIn->img.plane[3] = 0;
}
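
The comment in the example above notes that without the zerolatency tune the encoder holds delayed frames. A minimal flush sketch for that case, reusing the members from this example (write_nals() is a hypothetical sink, not part of the original code):

	// Drain the frames still buffered inside the encoder (B-frame/lookahead delay).
	// Only needed when the encoder was not tuned with "zerolatency".
	while (x264_encoder_delayed_frames(pX264Handle) > 0) {
		int iNals = 0;
		int frameSize = x264_encoder_encode(pX264Handle, &pNals, &iNals, NULL, pPicOut);
		if (frameSize < 0)
			break;                        // encoder error
		if (frameSize > 0)
			write_nals(pNals, iNals);     // hypothetical sink for the encoded NAL units
	}
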
Example #5
void mpeg_video_recorder::process_frame(const void* data, uints size, uint64 timestamp_ns, uint nbatchframes, bool video_end){

	if (!m_IsRecording){
		// this shouldn't be here, but the last frame isn't unique, so frames sent after the "last" frame must be skipped
		return;
	}

	if (m_iLastFramesize > 0 && m_uiFrameIndex > 0){
		coid::uint64 timeMs = (timestamp_ns - m_uiLastTimestampNs) / 1000000.0;
		MP4Duration frameDuration = timeMs * 90; // 90,000 ticks per second in the MP4 container
		MP4WriteSample(m_hMp4FileHandle, m_iVideoTrackID, m_pLastNals[0].p_payload, m_iLastFramesize, frameDuration);
		m_uiLastTimestampNs = timestamp_ns;
	}
	else if(m_uiFrameIndex == 0){
		m_uiLastTimestampNs = timestamp_ns;
	}
	else{
		log(WARNINGMESSAGE("Frameskip detected!"));
	}

	uchar * ucharData = (uchar*)data;

	uint width4 = GetDivisibleBy4(m_iWidth);
	uint halfWidth4 = GetDivisibleBy4(m_iWidth >> 1);

	x264_picture_t pic_in,pic_out;
	x264_picture_init(&pic_in);
	pic_in.img.i_csp = X264_CSP_I420;
	pic_in.img.i_plane = 3;
	pic_in.img.plane[0] = ucharData;
	pic_in.img.plane[1] = ucharData + width4;
	pic_in.img.plane[2] = ucharData + 2 * width4 + halfWidth4;
	pic_in.img.i_stride[0] = width4 + halfWidth4;
	pic_in.img.i_stride[2] = pic_in.img.i_stride[1] = (width4 + halfWidth4) * 2;
	
	int i_nals;
	
	m_iLastFramesize = x264_encoder_encode(m_pEncoder, &m_pLastNals, &i_nals, &pic_in, &pic_out);

	m_uiFrameIndex++;

	if (video_end){
		if (m_iLastFramesize > 0){
			MP4Duration frameDuration = 33 * 90;  // 90,000 ticks per second in the MP4 container
			MP4WriteSample(m_hMp4FileHandle, m_iVideoTrackID, m_pLastNals[0].p_payload, m_iLastFramesize, frameDuration);
		}
		StopRecording();
	}	
}
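
A note on the duration math above: assuming the track uses the conventional 90,000 Hz MP4 timescale (as the comments state), timeMs * 90 converts milliseconds into ticks, and the hard-coded 33 * 90 = 2970 ticks for the final frame corresponds to an assumed ~33 ms frame, i.e. roughly 30 fps.
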
Example #6
static int put_image(struct vf_instance *vf, mp_image_t *mpi, double pts)
{
    h264_module_t *mod=(h264_module_t*)vf->priv;
    int i;

    x264_picture_init(&mod->pic);
    mod->pic.img.i_csp=param.i_csp;
    mod->pic.img.i_plane=3;
    for(i=0; i<4; i++) {
        mod->pic.img.plane[i] = mpi->planes[i];
        mod->pic.img.i_stride[i] = mpi->stride[i];
    }

    mod->pic.i_type = X264_TYPE_AUTO;
    if (is_forced_key_frame(pts))
        mod->pic.i_type = X264_TYPE_KEYFRAME;

    return encode_frame(vf, &mod->pic) >= 0;
}
Example #7
static inline void init_pic_data(struct obs_x264 *obsx264, x264_picture_t *pic,
		struct encoder_frame *frame)
{
	x264_picture_init(pic);

	pic->i_pts = frame->pts;
	pic->img.i_csp = obsx264->params.i_csp;

	if (obsx264->params.i_csp == X264_CSP_NV12)
		pic->img.i_plane = 2;
	else if (obsx264->params.i_csp == X264_CSP_I420)
		pic->img.i_plane = 3;
	else if (obsx264->params.i_csp == X264_CSP_I444)
		pic->img.i_plane = 3;

	for (int i = 0; i < pic->img.i_plane; i++) {
		pic->img.i_stride[i] = (int)frame->linesize[i];
		pic->img.plane[i]    = frame->data[i];
	}
}
Example #8
    bool Encode(LPVOID picInPtr, LPVOID nalOut, int *pNalNum)
    {
        x264_picture_t *picIn = (x264_picture_t*)picInPtr;
        x264_picture_t picOut;
		x264_picture_init(&picOut);

        if(x264_encoder_encode(x264, (x264_nal_t**)&nalOut, pNalNum, picIn, &picOut) < 0)
        {
            DOLOG("x264 encode failed");
            return false;
        }
		int countCacheFrame = x264_encoder_delayed_frames(x264);
		DOLOG("Number of currently buffered frames: " + std::to_string(countCacheFrame)); // assumes DOLOG accepts std::string (requires <string>)
		int res = 0;

		if (countCacheFrame > 0)
		{
			// Drain one delayed frame; this overwrites nalOut/pNalNum from the call above.
			res = x264_encoder_encode(x264, (x264_nal_t**)&nalOut, pNalNum, NULL, &picOut);
		}

        return true;
    }
Example #9
bool NXTMSWriter::open(const char* filepath, const char* suffix, NXIN NXSDKMediaFormat& mediaFormat)
{
	_filepath = filepath ;
	_nxformat = mediaFormat ;
	_pxFormat = (AVPixelFormat)NXKit::pixel_format_convert_nx(mediaFormat.video_pixfmt) ;

	_fileObjt = new NXFile(_filepath.c_str());
	if(_fileObjt->exist())
		_fileObjt->remove() ;

	if(!_fileObjt->open("wb")) {
		__android_log_print(ANDROID_LOG_ERROR, "NXTM", "TMP4 Sync Writer Open File Failure!!");
		return false ;
	}

	memcpy(_fileHead.headr_codec, "MPT ", 4) ;

	_fileHead.media_time  = 0 ;
	_fileHead.track_masks = mediaFormat.track_mask ;

	_fileHead.video_frames = 0 ;
	_fileHead.frame_rate   = mediaFormat.video_format.frame_rate ;
	_fileHead.video_width  = mediaFormat.video_format.width ;
	_fileHead.video_height = mediaFormat.video_format.height ;
	_fileHead.video_pixfmt = NXSDK_PIX_FMT_YUV420P ;

	_fileHead.audio_channels = mediaFormat.audio_format.channels ;
	_fileHead.audio_smpbits  = mediaFormat.audio_format.sample_bits ;
	_fileHead.audio_smprate  = mediaFormat.audio_format.sample_rate ;

	//init x264 param
	x264_param_default(&_x264Param) ;
	x264_param_default_preset(&_x264Param, x264_preset_names[0], x264_tune_names[0]);
	x264_param_apply_profile(&_x264Param, x264_profile_names[0]); //baseline, it is fast !!

	_x264Param.i_threads 	= X264_THREADS_AUTO ; //multi-threaded encoding, for speed.
	_x264Param.i_width   	= _fileHead.video_width ;
	_x264Param.i_height  	= _fileHead.video_height ;
	_x264Param.i_keyint_min = 0 ;
	_x264Param.i_keyint_max = _fileHead.frame_rate/2 ;
	_x264Param.i_bframe     = 0 ;
	_x264Param.b_open_gop   = 0 ;
	_x264Param.i_bframe_pyramid = 0 ;
	_x264Param.i_bframe_adaptive = X264_B_ADAPT_NONE ;
	_x264Param.i_fps_den    = 1 ;
	_x264Param.i_fps_num    = _fileHead.frame_rate ;
	_x264Param.i_timebase_den = 1 ;
	_x264Param.i_timebase_num = _fileHead.frame_rate ;

	_x264Param.rc.i_rc_method 	= X264_RC_CRF ; //X264_RC_CQP: constant QP, X264_RC_CRF: constant rate factor (quality-based), X264_RC_ABR: average bitrate
	//_x264Param.rc.i_qp_constant = 20 ;
	_x264Param.rc.i_qp_max	  	= 20 ; //keep quality as close to the source as possible.
	_x264Param.rc.i_qp_min    	= 0 ;
	_x264Param.rc.i_bitrate   	= 0 ; //bitrate left unset.

	_x264Encoder = x264_encoder_open(&_x264Param) ;
	if(!_x264Encoder) {
		__android_log_print(ANDROID_LOG_ERROR, "NXTM", "TMP4 Sync Writer Get X264 Encoder Failure!!");
		return false ;
	}

	_x264ImageI = new x264_picture_t ;
	_x264ImageO = new x264_picture_t ;

	x264_picture_init(_x264ImageI);
	x264_picture_init(_x264ImageO);
	x264_picture_alloc(_x264ImageI, X264_CSP_I420, _x264Param.i_width, _x264Param.i_height) ;

	_x264ImageI->img.i_csp   = X264_CSP_I420 ;
	_x264ImageI->img.i_plane = 3 ;
	_x264ImageI->img.i_stride[0] = _x264Param.i_width ;
	_x264ImageI->img.i_stride[1] = _x264Param.i_width>>1 ;
	_x264ImageI->img.i_stride[2] = _x264Param.i_width>>1 ;

	_avscaler = sws_getContext(_nxformat.video_format.width, _nxformat.video_format.height, _pxFormat,
							   _nxformat.video_format.width, _nxformat.video_format.height, AV_PIX_FMT_YUV420P, SWS_BICUBIC , NULL, NULL, NULL);
	if(_avscaler == NULL) {
		__android_log_print(ANDROID_LOG_ERROR, "NXTM", "TMP4 Sync Writer sws_getContext() Failure!!");
		return false ;
	}

	_frmcount  = 0 ;

	return true ;
}
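
The rate-control comment in the example above distinguishes CQP, CRF and ABR. This example selects CRF but only pins the QP range; a sketch of a more conventional CRF setup (illustrative values, not taken from this example) would set the rate factor directly:

	// Sketch of a typical CRF configuration (values are illustrative):
	_x264Param.rc.i_rc_method       = X264_RC_CRF;
	_x264Param.rc.f_rf_constant     = 23;  // target quality; lower means better quality and larger output
	_x264Param.rc.f_rf_constant_max = 28;  // optional worst-case rate factor when VBV is enabled
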
Example #10
int main(int argc, char **argv) {

    int ret = 0;
    int y_size = 0;
    int i = 0;
    int j = 0;

    //Encode 50 frame
    //if set 0, encode all frame
    int frame_num = 50;
    int csp = X264_CSP_I420;
    int width = 640;
    int height = 360;

    int i_nal = 0;
    x264_nal_t *p_nals = NULL;
    x264_t *p_handle = NULL;
    x264_picture_t *p_pic_in = (x264_picture_t *) malloc(sizeof(x264_picture_t));
    x264_picture_t *p_pic_out = (x264_picture_t *) malloc(sizeof(x264_picture_t));
    x264_param_t *p_param = (x264_param_t *) malloc(sizeof(x264_param_t));

    //FILE* fp_src  = fopen("../cuc_ieschool_640x360_yuv444p.yuv", "rb");
    FILE *fp_src = fopen("../cuc_ieschool_640x360_yuv420p.yuv", "rb");

    FILE *fp_dst = fopen("cuc_ieschool.h264", "wb");

    //Check
    if ((NULL == fp_src) || (NULL == fp_dst)) {
        lwlog_err("Open files error.");
        return -1;
    }

    x264_param_default(p_param);
    p_param->i_width = width;
    p_param->i_height = height;
    /*
    //Param
    p_param->i_log_level  = X264_LOG_DEBUG;
    p_param->i_threads  = X264_SYNC_LOOKAHEAD_AUTO;
    p_param->i_frame_total = 0;
    p_param->i_keyint_max = 10;
    p_param->i_bframe  = 5;
    p_param->b_open_gop  = 0;
    p_param->i_bframe_pyramid = 0;
    p_param->rc.i_qp_constant=0;
    p_param->rc.i_qp_max=0;
    p_param->rc.i_qp_min=0;
    p_param->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;
    p_param->i_fps_den  = 1;
    p_param->i_fps_num  = 25;
    p_param->i_timebase_den = p_param->i_fps_num;
    p_param->i_timebase_num = p_param->i_fps_den;
    */
    p_param->i_csp = csp;
    x264_param_apply_profile(p_param, x264_profile_names[5]);

    p_handle = x264_encoder_open(p_param);

    x264_picture_init(p_pic_out);
    x264_picture_alloc(p_pic_in, csp, p_param->i_width, p_param->i_height);

    //ret = x264_encoder_headers(p_handle, &p_nals, &i_nal);

    y_size = p_param->i_width * p_param->i_height;
    //detect frame number
    if (0 == frame_num) {
        fseek(fp_src, 0, SEEK_END);
        switch (csp) {
        case X264_CSP_I444:
            frame_num = ftell(fp_src) / (y_size * 3);
            break;
        case X264_CSP_I420:
            frame_num = ftell(fp_src) / (y_size * 3 / 2);
            break;
        default:
            lwlog_err("Colorspace Not Support");
            return -1;
        }
        fseek(fp_src, 0, SEEK_SET);
    }

    //Loop to Encode
    for (i = 0; i < frame_num; ++i) {
        switch (csp) {
        case X264_CSP_I444: {
            fread(p_pic_in->img.plane[0], y_size, 1, fp_src);    //Y
            fread(p_pic_in->img.plane[1], y_size, 1, fp_src);    //U
            fread(p_pic_in->img.plane[2], y_size, 1, fp_src);    //V
            break;
        }
        case X264_CSP_I420: {
            fread(p_pic_in->img.plane[0], y_size, 1, fp_src);    //Y
            fread(p_pic_in->img.plane[1], y_size / 4, 1, fp_src);    //U
            fread(p_pic_in->img.plane[2], y_size / 4, 1, fp_src);    //V
            break;
        }
        default: {
            lwlog_err("Colorspace Not Support.\n");
            return -1;
        }
        }
        p_pic_in->i_pts = i;

        ret = x264_encoder_encode(p_handle, &p_nals, &i_nal, p_pic_in, p_pic_out);
        if (ret < 0) {
            lwlog_err("x264_encoder_encode error");
            return -1;
        }

        lwlog_info("Succeed encode frame: %5d\n", i);

        for (j = 0; j < i_nal; ++j) {
            fwrite(p_nals[j].p_payload, 1, p_nals[j].i_payload, fp_dst);
        }
    }
    i = 0;
    //flush encoder
    while (1) {
        ret = x264_encoder_encode(p_handle, &p_nals, &i_nal, NULL, p_pic_out);
        if (0 == ret) {
            break;
        }
        lwlog_info("Flush 1 frame.");
        for (j = 0; j < i_nal; ++j) {
            fwrite(p_nals[j].p_payload, 1, p_nals[j].i_payload, fp_dst);
        }
        ++i;
    }
    x264_picture_clean(p_pic_in);
    x264_encoder_close(p_handle);
    p_handle = NULL;

    free(p_pic_in);
    free(p_pic_out);
    free(p_param);

    fclose(fp_src);
    fclose(fp_dst);

    return 0;
}
Example #11
void handleImage(const sensor_msgs::ImageConstPtr& img)
{
	ros::Time now = ros::Time::now();
	if(now - g_lastImageTime < g_minTimeBetweenImages)
		return;
	g_lastImageTime = now;

	ros::Time start = ros::Time::now();

	cv_bridge::CvImageConstPtr cvImg = cv_bridge::toCvShare(img, "bgr8");

	int height = g_width * cvImg->image.rows / cvImg->image.cols;

	cv::Mat resized;
	cv::resize(cvImg->image, resized, cv::Size(g_width, height), CV_INTER_AREA);

	if(!g_encoder)
	{
		x264_param_t params;
		x264_param_default(&params);
		x264_param_apply_profile(&params, "high");
		x264_param_default_preset(&params, "ultrafast", "zerolatency");

		params.i_width = g_width;
		params.i_height = height;
		params.b_repeat_headers = 1;
		params.b_intra_refresh = 1;
		params.i_fps_num = 1;
		params.i_fps_den = 10;
		params.i_frame_reference = 1;
		params.i_keyint_max = 20;
		params.i_bframe = 0;
		params.b_open_gop = 0;
	// 	params.rc.i_rc_method = X264_RC_CRF;
	// // 	params.rc.i_qp_min = params.rc.i_qp_max = 47;
	// 	params.rc.i_vbv_buffer_size = 6;
	// 	params.rc.i_vbv_max_bitrate = 6000;
	// 	params.rc.i_bitrate = 6;
		params.rc.i_rc_method = X264_RC_CRF;
		params.rc.i_vbv_buffer_size = 1000;
		params.rc.i_vbv_max_bitrate = 1000;
		params.rc.i_bitrate = 1000;
		params.i_threads = 4;

		g_encoder = x264_encoder_open(&params);

		x264_picture_init(&g_inputPicture);
		x264_picture_init(&g_outPicture);

		g_encoderConfiguredHeight = height;

		g_inBuf.resize(g_width*height + g_width*height/2);
	}
	else if(height != g_encoderConfiguredHeight)
	{
		ROS_ERROR("Image dimensions changed!");
		throw std::runtime_error("Image dimensions changed");
	}

	RGB_to_YUV420(resized.data, g_inBuf.data(), g_width, height);

	g_inputPicture.img.plane[0] = g_inBuf.data();
	g_inputPicture.img.plane[1] = g_inBuf.data() + g_width*height;
	g_inputPicture.img.plane[2] = g_inBuf.data() + g_width*height + g_width*height/4;

	g_inputPicture.img.i_stride[0] = g_width;
	g_inputPicture.img.i_stride[1] = g_width/2;
	g_inputPicture.img.i_stride[2] = g_width/2;

	g_inputPicture.img.i_csp = X264_CSP_I420;
	g_inputPicture.img.i_plane = 3;

	x264_nal_t* nals;
	int numNals;

	x264_encoder_encode(g_encoder, &nals, &numNals, &g_inputPicture, &g_outPicture);

	std::size_t size = 0;
	for(int i = 0; i < numNals; ++i)
		size += nals[i].i_payload;

	sensor_msgs::CompressedImagePtr msg(new sensor_msgs::CompressedImage);

	msg->header = img->header;
	msg->format = "h264";

	msg->data.resize(size);
	unsigned int off = 0;

	for(int i = 0; i < numNals; ++i)
	{
		memcpy(msg->data.data() + off, nals[i].p_payload, nals[i].i_payload);
		off += nals[i].i_payload;
	}

	g_pub.publish(msg);
	ROS_DEBUG("took %f", (ros::Time::now() - start).toSec());
}
Example #12
int encode_x264(struct videnc_state *st, bool update,
		const struct vidframe *frame)
{
	x264_picture_t pic_in, pic_out;
	x264_nal_t *nal;
	int i_nal;
	int i, err, ret;
	int csp, pln;

	if (!st || !frame)
		return EINVAL;

	switch (frame->fmt) {

	case VID_FMT_YUV420P:
		csp = X264_CSP_I420;
		pln = 3;
		break;

	case VID_FMT_NV12:
		csp = X264_CSP_NV12;
		pln = 2;
		break;

	default:
		warning("avcodec: pixel format not supported (%s)\n",
			vidfmt_name(frame->fmt));
		return ENOTSUP;
	}

	if (!st->x264 || !vidsz_cmp(&st->encsize, &frame->size)) {

		err = open_encoder_x264(st, &st->encprm, &frame->size, csp);
		if (err)
			return err;
	}

	if (update) {
#if X264_BUILD >= 95
		x264_encoder_intra_refresh(st->x264);
#endif
		debug("avcodec: x264 picture update\n");
	}

	x264_picture_init(&pic_in);

	pic_in.i_type = update ? X264_TYPE_IDR : X264_TYPE_AUTO;
	pic_in.i_qpplus1 = 0;
	pic_in.i_pts = ++st->pts;

	pic_in.img.i_csp = csp;
	pic_in.img.i_plane = pln;
	for (i=0; i<pln; i++) {
		pic_in.img.i_stride[i] = frame->linesize[i];
		pic_in.img.plane[i]    = frame->data[i];
	}

	ret = x264_encoder_encode(st->x264, &nal, &i_nal, &pic_in, &pic_out);
	if (ret < 0) {
		fprintf(stderr, "x264 [error]: x264_encoder_encode failed\n");
	}
	if (i_nal == 0)
		return 0;

	err = 0;
	for (i=0; i<i_nal && !err; i++) {
		const uint8_t hdr = nal[i].i_ref_idc<<5 | nal[i].i_type<<0;
		int offset = 0;

#if X264_BUILD >= 76
		const uint8_t *p = nal[i].p_payload;

		/* Find the NAL Escape code [00 00 01] */
		if (nal[i].i_payload > 4 && p[0] == 0x00 && p[1] == 0x00) {
			if (p[2] == 0x00 && p[3] == 0x01)
				offset = 4 + 1;
			else if (p[2] == 0x01)
				offset = 3 + 1;
		}
#endif

		/* skip Supplemental Enhancement Information (SEI) */
		if (nal[i].i_type == H264_NAL_SEI)
			continue;

		err = h264_nal_send(true, true, (i+1)==i_nal, hdr,
				    nal[i].p_payload + offset,
				    nal[i].i_payload - offset,
				    st->encprm.pktsize, st->pkth, st->arg);
	}

	return err;
}
Example #13
static int convert_obe_to_x264_pic( x264_picture_t *pic, obe_raw_frame_t *raw_frame )
{
    obe_image_t *img = &raw_frame->img;
    int idx = 0, count = 0;

    x264_picture_init( pic );

    memcpy( pic->img.i_stride, img->stride, sizeof(img->stride) );
    memcpy( pic->img.plane, img->plane, sizeof(img->plane) );
    pic->img.i_plane = img->planes;
    pic->img.i_csp = img->csp == PIX_FMT_YUV422P || img->csp == PIX_FMT_YUV422P10 ? X264_CSP_I422 : X264_CSP_I420;

    if( X264_BIT_DEPTH == 10 )
        pic->img.i_csp |= X264_CSP_HIGH_DEPTH;

    for( int i = 0; i < raw_frame->num_user_data; i++ )
    {
        /* Only give correctly formatted data to the encoder */
        if( raw_frame->user_data[i].type == USER_DATA_AVC_REGISTERED_ITU_T35 ||
            raw_frame->user_data[i].type == USER_DATA_AVC_UNREGISTERED )
        {
            count++;
        }
    }

    pic->extra_sei.num_payloads = count;

    if( pic->extra_sei.num_payloads )
    {
        pic->extra_sei.sei_free = free;
        pic->extra_sei.payloads = malloc( pic->extra_sei.num_payloads * sizeof(*pic->extra_sei.payloads) );

        if( !pic->extra_sei.payloads )
            return -1;

        for( int i = 0; i < raw_frame->num_user_data; i++ )
        {
            /* Only give correctly formatted data to the encoder */
            if( raw_frame->user_data[i].type == USER_DATA_AVC_REGISTERED_ITU_T35 ||
                raw_frame->user_data[i].type == USER_DATA_AVC_UNREGISTERED )
            {
                pic->extra_sei.payloads[idx].payload_type = raw_frame->user_data[i].type;
                pic->extra_sei.payloads[idx].payload_size = raw_frame->user_data[i].len;
                pic->extra_sei.payloads[idx].payload = raw_frame->user_data[i].data;
                idx++;
            }
            else
            {
                syslog( LOG_WARNING, "Invalid user data presented to encoder - type %i \n", raw_frame->user_data[i].type );
                free( raw_frame->user_data[i].data );
            }
        }
    }
    else if( raw_frame->num_user_data )
    {
        for( int i = 0; i < raw_frame->num_user_data; i++ )
        {
            syslog( LOG_WARNING, "Invalid user data presented to encoder - type %i \n", raw_frame->user_data[i].type );
            free( raw_frame->user_data[i].data );
        }
    }

    return 0;
}
Example #14
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hMatrix   = yuvScalePixelShader->GetParameterByName(TEXT("yuvMat"));
    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    bUsing444 = false;

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //std::unique_ptr<ProfilerNode> encodeThreadProfiler;

    //----------------------------------------
    // time/timestamp stuff

    bool bWasLaggedFrame = false;

    totalStreamTime = 0;
    lastAudioTimestamp = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
    double bpsTime = 0.0;

    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;
        convertInfo[i].numThreads = numThreads;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bEncode;
    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirstEncode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD streamTimeStart  = GetQPCTimeNS();
    QWORD lastStreamTime   = 0;
    QWORD firstFrameTimeMS = streamTimeStart/1000000;
    QWORD frameLengthNS    = 1000000000/fps;

    while(WaitForSingleObject(hVideoEvent, INFINITE) == WAIT_OBJECT_0)
    {
        if (bShutdownVideoThread)
            break;

        QWORD renderStartTime = GetQPCTimeNS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/1000000);

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        QWORD renderStartTimeMS = renderStartTime/1000000;

        QWORD curStreamTime = latestVideoTimeNS;
        if (!lastStreamTime)
            lastStreamTime = curStreamTime-frameLengthNS;
        QWORD frameDelta = curStreamTime-lastStreamTime;
        //if (!lastStreamTime)
        //    lastStreamTime = renderStartTime-frameLengthNS;
        //QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.000000001;
        //lastStreamTime = renderStartTime;

        bool bUpdateBPS = false;

        profileIn("video thread frame");

        //Log(TEXT("Stream Time: %llu"), curStreamTime);
        //Log(TEXT("frameDelta: %lf"), fSeconds);

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!pushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if (bPleaseEnableProjector)
            ActuallyEnableProjector();
        else if(bPleaseDisableProjector)
            DisableProjector();

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = 0;
        
        if (network) {
            curBytesSent = network->GetCurrentSentBytes();
            curFramesDropped = network->NumDroppedFrames();
        } else if (numSecondsWaited) {
            //reset stats if the network disappears
            bytesPerSec = 0;
            bpsTime = 0;
            numSecondsWaited = 0;
            curBytesSent = 0;
            zero(lastBytesSent, sizeof(lastBytesSent));
        }

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        if(network) curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if (bProjector) {
            SetRenderTarget(projectorTexture);

            Vect2 renderFrameSize, renderFrameOffset;
            Vect2 projectorSize = Vect2(float(projectorWidth), float(projectorHeight));

            float projectorAspect = (projectorSize.x / projectorSize.y);
            float baseAspect = (baseSize.x / baseSize.y);

            if (projectorAspect < baseAspect) {
                float fProjectorWidth = float(projectorWidth);

                renderFrameSize   = Vect2(fProjectorWidth, fProjectorWidth / baseAspect);
                renderFrameOffset = Vect2(0.0f, (projectorSize.y-renderFrameSize.y) * 0.5f);
            } else {
                float fProjectorHeight = float(projectorHeight);

                renderFrameSize   = Vect2(fProjectorHeight * baseAspect, fProjectorHeight);
                renderFrameOffset = Vect2((projectorSize.x-renderFrameSize.x) * 0.5f, 0.0f);
            }

            DrawPreview(renderFrameSize, renderFrameOffset, projectorSize, curRenderTarget, Preview_Projector);

            SetRenderTarget(NULL);
        }

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);
            DrawPreview(renderFrameSize, renderFrameOffset, renderFrameCtrlSize, curRenderTarget,
                    bFullscreenMode ? Preview_Fullscreen : Preview_Standard);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene) {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        switch(colorDesc.matrix)
        {
        case ColorMatrix_GBR:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[0] : (float*)yuvMat[0]);
            break;
        case ColorMatrix_YCgCo:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[1] : (float*)yuvMat[1]);
            break;
        case ColorMatrix_BT2020NCL:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[2] : (float*)yuvMat[2]);
            break;
        case ColorMatrix_BT709:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[3] : (float*)yuvMat[3]);
            break;
        case ColorMatrix_SMPTE240M:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[4] : (float*)yuvMat[4]);
            break;
        default:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[5] : (float*)yuvMat[5]);
        }

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if (bProjector && !copyWait)
            projectorSwap->Present(0, 0);

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        //------------------------------------
        // present/upload

        profileIn("GPU download and conversion");

        bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (renderStartTimeMS-firstFrameTimeMS > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical); 
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTimestamp = lastStreamTime/1000000;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        lastStreamTime = curStreamTime;

        if(bEncode)
        {
            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirstEncode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].inPitch   = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch  = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
								}
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirstEncode)
                                bFirstEncode = bEncode = false;
                        }
                        else
                        {
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }

                    if(bEncode)
                    {
                        //encodeThreadProfiler.reset(::new ProfilerNode(TEXT("EncodeThread"), true));
                        //encodeThreadProfiler->MonitorThread(hEncodeThread);
                        curFramePic = &picOut;
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        message << TEXT(" This error can also occur if you have enabled opencl in x264 custom settings.");

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream && totalStreamTime > 15000)
            {
                if (curStrain > 25)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo (FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        App->RemoveStreamInfo(adjustmentStreamId);
                        adjustmentStreamId = 0;

                        bUpdateBPS = true;

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;
        profileOut;
        profileOut; //frame

        //------------------------------------
        // frame sync

        //QWORD renderStopTime = GetQPCTimeNS();

        if(bWasLaggedFrame = (frameDelta > frameLengthNS))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    DisableProjector();

    //encodeThreadProfiler.reset();

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirstEncode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of late frames: %d (%0.2f%%) (it's okay for some frames to be late)"), numTotalFrames, numLongFrames, (numTotalFrames > 0) ? (double(numLongFrames)/double(numTotalFrames))*100.0 : 0.0f);
}
Example #15
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    if(bUsingQSV)
        bUsing444 = false;

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];
    DWORD outTimes[NUM_OUT_BUFFERS] = {0, 0, 0};

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //----------------------------------------
    // time/timestamp stuff

    bufferedTimes.Clear();
    ctsOffsets.Clear();
    int bufferedFrames = 1; //to avoid constantly polling number of frames

#ifdef USE_100NS_TIME
    QWORD streamTimeStart = GetQPCTime100NS();
    QWORD frameTime100ns = 10000000/fps;

    QWORD sleepTargetTime = 0;
    bool bWasLaggedFrame = false;
#else
    DWORD streamTimeStart = OSGetTime();
    DWORD fpsTimeAdjust = 0;
#endif
    totalStreamTime = 0;

    lastAudioTimestamp = 0;

    latestVideoTime = firstSceneTimestamp = GetQPCTimeMS();

    DWORD fpsTimeNumerator = 1000-(frameTime*fps);
    DWORD fpsTimeDenominator = fps;
    DWORD cfrTime = 0;
    DWORD cfrTimeAdjust = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    int numTotalDuplicatedFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
#ifdef USE_100NS_TIME
    double bpsTime = 0.0;
#else
    float bpsTime = 0.0f;
#endif
    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirst420Encode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD curStreamTime = 0, lastStreamTime, firstFrameTime = GetQPCTimeMS();

#ifdef USE_100NS_TIME
    lastStreamTime = GetQPCTime100NS()-frameTime100ns;
#else
    lastStreamTime = firstFrameTime-frameTime;
#endif

    //bool bFirstAudioPacket = true;

    List<ProfilerNode> threadedProfilers;
    bool bUsingThreadedProfilers = false;

    while(bRunning || bufferedFrames)
    {
#ifdef USE_100NS_TIME
        QWORD renderStartTime = GetQPCTime100NS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/10000);

        if(sleepTargetTime == 0 || bWasLaggedFrame)
            sleepTargetTime = renderStartTime;
#else
        DWORD renderStartTime = OSGetTime();
        totalStreamTime = renderStartTime-streamTimeStart;

        DWORD frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;
        }
#endif

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        profileIn("frame");

#ifdef USE_100NS_TIME
        QWORD qwTime = renderStartTime/10000;
        latestVideoTime = qwTime;

        QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.0000001;

        //Log(TEXT("frameDelta: %f"), fSeconds);

        lastStreamTime = renderStartTime;
#else
        QWORD qwTime = GetQPCTimeMS();
        latestVideoTime = qwTime;

        QWORD frameDelta = qwTime-lastStreamTime;
        float fSeconds = float(frameDelta)*0.001f;

        //Log(TEXT("frameDelta: %llu"), frameDelta);

        lastStreamTime = qwTime;
#endif

        bool bUpdateBPS = false;
        profileIn("frame preprocessing and rendering");

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);

            LoadVertexShader(mainVertexShader);
            LoadPixelShader(mainPixelShader);

            Ortho(0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y, 0.0f, -100.0f, 100.0f);
            if(renderFrameCtrlSize.x != oldRenderFrameCtrlWidth || renderFrameCtrlSize.y != oldRenderFrameCtrlHeight)
            {
                // User is drag resizing the window. We don't recreate the swap chains so our coordinates are wrong
                SetViewport(0.0f, 0.0f, (float)oldRenderFrameCtrlWidth, (float)oldRenderFrameCtrlHeight);
            }
            else
                SetViewport(0.0f, 0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y);

            // Draw background (Black if fullscreen, window colour otherwise)
            if(bFullscreenMode)
                ClearColorBuffer(0x000000);
            else
                ClearColorBuffer(GetSysColor(COLOR_BTNFACE));

            if(bTransitioning)
            {
                BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
                DrawSprite(transitionTexture, 0xFFFFFFFF,
                        renderFrameOffset.x, renderFrameOffset.y,
                        renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);
                BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
            }

            DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF,
                    renderFrameOffset.x, renderFrameOffset.y,
                    renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene) {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        profileOut;

        //------------------------------------
        // present/upload

        profileIn("video encoding and uploading");

        bool bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (qwTime-firstFrameTime > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical); 
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTime = qwTime;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        if(bEncode)
        {
            curStreamTime = qwTime-firstFrameTime;

            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirst420Encode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            outTimes[nextOutBuffer] = (DWORD)curStreamTime;

                            bool firstRun = threadedProfilers.Num() == 0;
                            if(firstRun)
                                threadedProfilers.SetSize(numThreads);

                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].inPitch   = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch  = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
                                }
                                if(!firstRun)
                                    threadedProfilers[i].~ProfilerNode();
                                ::new (&threadedProfilers[i]) ProfilerNode(TEXT("Convert444Threads"), true);
                                threadedProfilers[i].MonitorThread(h420Threads[i]);
                                bUsingThreadedProfilers = true;
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirst420Encode)
                                bFirst420Encode = bEncode = false;
                        }
                        else
                        {
                            outTimes[curOutBuffer] = (DWORD)curStreamTime;
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }
                    else
                    {
                        outTimes[curOutBuffer] = (DWORD)curStreamTime;

                        picOut.picOut->img.i_stride[0] = map.RowPitch;
                        picOut.picOut->img.plane[0]    = (uint8_t*)map.pData;
                    }

                    if(bEncode)
                    {
                        DWORD curFrameTimestamp = outTimes[prevOutBuffer];
                        //Log(TEXT("curFrameTimestamp: %u"), curFrameTimestamp);

                        //------------------------------------

                        FrameProcessInfo frameInfo;
                        frameInfo.firstFrameTime = firstFrameTime;
                        frameInfo.prevTexture = prevTexture;

                        if(bDupeFrames)
                        {
                            while(cfrTime < curFrameTimestamp)
                            {
                                DWORD frameTimeAdjust = frameTime;
                                cfrTimeAdjust += fpsTimeNumerator;
                                if(cfrTimeAdjust > fpsTimeDenominator)
                                {
                                    cfrTimeAdjust -= fpsTimeDenominator;
                                    ++frameTimeAdjust;
                                }

                                DWORD halfTime = (frameTimeAdjust+1)/2;

                                EncoderPicture &nextPic = (curFrameTimestamp-cfrTime <= halfTime) ? picOut : prevPicOut;
                                //Log(TEXT("cfrTime: %u, time: %u"), cfrTime, curFrameTimestamp);

                                //these lines are just for counting duped frames
                                if(nextPic == lastPic)
                                    ++numTotalDuplicatedFrames;
                                else
                                    lastPic = nextPic;

                                frameInfo.pic = &nextPic;
                                if(bUsingQSV)
                                    frameInfo.pic->mfxOut->Data.TimeStamp = cfrTime;
                                else
                                    frameInfo.pic->picOut->i_pts = cfrTime;
                                frameInfo.frameTimestamp = cfrTime;
                                ProcessFrame(frameInfo);

                                cfrTime += frameTimeAdjust;

                                //Log(TEXT("cfrTime: %u, chi frame: %u"), cfrTime, (curFrameTimestamp-cfrTime <= halfTime));
                            }
                        }
                        else
                        {
                            if(bUsingQSV)
                                picOut.mfxOut->Data.TimeStamp = curFrameTimestamp;
                            else
                                picOut.picOut->i_pts = curFrameTimestamp;

                            frameInfo.pic = &picOut;
                            frameInfo.frameTimestamp = curFrameTimestamp;

                            ProcessFrame(frameInfo);
                        }

                        if (!bRunning)
                            bufferedFrames = videoEncoder->GetBufferedFrames ();
                    }

                    if(bUsing444)
                    {
                        prevTexture->Unmap(0);
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream)
            {
                if (curStrain > 25)
                {
                    if (qwTime - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo (FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = qwTime;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (qwTime - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        bUpdateBPS = true;

                        lastAdjustmentTime = qwTime;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;

        profileOut; //video encoding and uploading
        profileOut; //frame

        //------------------------------------
        // frame sync

#ifdef USE_100NS_TIME
        QWORD renderStopTime = GetQPCTime100NS();
        sleepTargetTime += frameTime100ns;

        if(bWasLaggedFrame = (sleepTargetTime <= renderStopTime))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }
        else
            SleepTo(sleepTargetTime);
#else
        DWORD renderStopTime = OSGetTime();
        DWORD totalTime = renderStopTime-renderStartTime;

        if(totalTime > frameTimeAdjust)
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }
        else if(totalTime < frameTimeAdjust)
            OSSleep(frameTimeAdjust-totalTime);
#endif

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);
                    if(bUsingThreadedProfilers)
                        threadedProfilers[i].~ProfilerNode();

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirst420Encode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
    if(bDupeFrames)
        Log(TEXT("Total duplicated frames: %d (%0.2f%%)"), numTotalDuplicatedFrames, (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0);
}
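The cfrTime/cfrTimeAdjust logic above (and the matching fpsTimeNumerator/fpsTimeDenominator adjustment at the top of the loop) spreads the leftover milliseconds of 1000/fps across the frames of each second, so the effective frame rate stays close to the requested fps even when 1000 is not divisible by it. Below is a minimal standalone sketch of that accumulator idea; the variable names are illustrative and not taken from the capture loop above:

#include <stdio.h>

int main(void)
{
    const unsigned fps = 30;
    const unsigned frameTime = 1000 / fps;              /* 33 ms, truncated */
    const unsigned numerator = 1000 - frameTime * fps;  /* leftover: 10 ms per second */
    unsigned adjust = 0;

    for (unsigned i = 0; i < fps; i++) {
        unsigned thisFrame = frameTime;
        adjust += numerator;
        if (adjust > fps) {      /* same test the loop above uses against its denominator */
            adjust -= fps;
            ++thisFrame;         /* stretch this frame by one millisecond */
        }
        printf("frame %2u: %u ms\n", i, thisFrame);
    }
    return 0;                    /* the durations add up to roughly 1000 ms */
}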
Example #16
int main(int argc, char* argv[]){
    x264_param_t param;
    x264_t *h = NULL;
    x264_picture_t pic_in;
    x264_picture_t pic_out;
    x264_nal_t *nal;
    uint8_t *data = NULL;
    int widthXheight = width * height;
    int frame_size = width * height * 1.5;
    int read_sum = 0, write_sum = 0;
    int frames = 0;
    int i, rnum, i_size;
    x264_nal_t* pNals = NULL;

    x264_param_default(&param);
    param.i_width = width;
    param.i_height = height;
    param.i_bframe = 3;
    param.i_fps_num = 25;
    param.i_fps_den = 1;
    param.b_vfr_input = 0;
    param.i_keyint_max = 250;
    param.rc.i_bitrate = 1500;
    param.i_scenecut_threshold = 40;
    param.i_level_idc = 51;

    x264_param_apply_profile(&param, "high");

    h = x264_encoder_open( &param );

//    printf("param.rc.i_qp_min=%d, param.rc.i_qp_max=%d, param.rc.i_qp_step=%d param.rc.i_qp_constant=%d param.rc.i_rc_method=%d\n",
//            param.rc.i_qp_min, param.rc.i_qp_max, param.rc.i_qp_step, param.rc.i_qp_constant, param.rc.i_rc_method);
    printf("param:%s\n", x264_param2string(&param, 1));


    x264_picture_init( &pic_in );
    x264_picture_alloc(&pic_in, X264_CSP_YV12, width, height);
    pic_in.img.i_csp = X264_CSP_YV12;
    pic_in.img.i_plane = 3;

    data = (uint8_t*)malloc(0x400000);

    FILE* fpr = fopen(MFILE ".yuv", "rb");
    FILE* fpw1 = fopen(MFILE".szhu.h264", "wb");
//    FILE* fpw2 = fopen(MFILE".h264", "wb");

    if(!fpr || !fpw1 ) {
        printf("file open failed\n");
        return -1;
    }

    while(!feof(fpr)){
        rnum = fread(data, 1, frame_size, fpr);
        if(rnum != frame_size){
            printf("read file failed\n");
            break;
        }
        memcpy(pic_in.img.plane[0], data, widthXheight);
        memcpy(pic_in.img.plane[1], data + widthXheight, widthXheight >> 2);
        memcpy(pic_in.img.plane[2], data + widthXheight + (widthXheight >> 2), widthXheight >> 2);
        read_sum += rnum;
        frames ++;
//        printf("read frames=%d %.2fMB write:%.2fMB\n", frames, read_sum * 1.0 / 0x100000, write_sum * 1.0 / 0x100000);
        int i_nal;
        int i_frame_size = 0;

        if(0 && frames % 12 == 0){
            pic_in.i_type = X264_TYPE_I;
        }else{
            pic_in.i_type = X264_TYPE_AUTO;
        }
        i_frame_size = x264_encoder_encode( h, &nal, &i_nal, &pic_in, &pic_out );

        if(i_frame_size <= 0){
            //printf("\t!!!FAILED encode frame \n");
        }else{
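            // x264 guarantees that the payloads of all NALs returned by a single
            // x264_encoder_encode() call are contiguous in memory, so writing
            // i_frame_size bytes starting at nal[0].p_payload emits every NAL of this frame.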
            fwrite(nal[0].p_payload, 1, i_frame_size, fpw1);
//            printf("\t+++i_frame_size=%d\n", i_frame_size);
            write_sum += i_frame_size;
        }
#if 0
        for(i = 0; i < i_nal; i ++){
            i_size = nal[i].i_payload;
//            fwrite(nal[i].p_payload, 1, nal[i].i_payload, fpw1);
            fwrite(nal[i].p_payload, 1, i_frame_size, fpw1);
            x264_nal_encode(h, data, &nal[i]);
            if(i_size != nal[i].i_payload){
                printf("\t\ti_size=%d nal[i].i_payload=%d\n", i_size, nal[i].i_payload);
            }
            fwrite(data, 1, nal[i].i_payload, fpw2);
        }
#endif
    }

    free(data);
    x264_picture_clean(&pic_in);
    // pic_out is only filled in by the encoder and was never allocated with
    // x264_picture_alloc, so it must not be passed to x264_picture_clean
    if(h){
        x264_encoder_close(h);
        h = NULL;
    }
    fclose(fpw1);
//    fclose(fpw2);
    fclose(fpr);
    printf("h=0x%X", h);
    return 0;
}
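Note: with param.i_bframe = 3 the encoder buffers frames internally, so the read/encode loop above can finish while frames are still queued inside x264 and never written to the file. A hedged sketch of draining them before x264_encoder_close(), reusing the h, nal, pic_out, fpw1 and write_sum variables from this example:

    /* flush the frames still buffered in the encoder (sketch) */
    while (x264_encoder_delayed_frames(h) > 0) {
        int i_nal = 0;
        int i_frame_size = x264_encoder_encode(h, &nal, &i_nal, NULL, &pic_out);
        if (i_frame_size < 0)
            break;                                       /* encoder error */
        if (i_frame_size > 0) {
            fwrite(nal[0].p_payload, 1, i_frame_size, fpw1);
            write_sum += i_frame_size;
        }
    }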
Example #17
int main(int argc, char* argv[])
{
	if (argc != 5)
	{
		printf("usage : app [input] [width] [height] [output]\n");
		return 0;
	}

	const char* input = argv[1];
	const char* out264 = argv[4];
	const int width = atoi(argv[2]);
	const int height = atoi(argv[3]);

	FILE *fin = 0, *fout = 0;
	fopen_s(&fin, input, "rb");
	fopen_s(&fout, out264, "wb");

	if (fin == 0 || fout == 0)
	{
		printf("openf file failed\n");
		return -1;
	}

	x264_param_t param;
	x264_param_default_preset(&param, "fast", "zerolatency");
	//x264_param_default(&param);

	param.i_width = width;
	param.i_height = height;
	param.i_bframe = 0;
	param.i_threads = 1;
	param.i_sync_lookahead = 0;
	//	param.b_deblocking_filter = 1;
	param.b_cabac = 1;
	param.i_fps_num = 25;
	param.i_fps_den = 1;
	param.i_level_idc = 30;
	param.i_keyint_min = param.i_fps_num;
	param.i_keyint_max = param.i_fps_num * 2;

	param.analyse.i_subpel_refine = 5;
	param.analyse.i_me_method = X264_ME_HEX;
	param.analyse.inter = X264_ANALYSE_I4x4 | X264_ANALYSE_I8x8 | X264_ANALYSE_PSUB16x16 | X264_ANALYSE_BSUB16x16;
	param.analyse.intra = X264_ANALYSE_I4x4 | X264_ANALYSE_I8x8;

	if ((param.analyse.inter | param.analyse.intra) & X264_ANALYSE_I8x8)
	{
		param.analyse.b_transform_8x8 = 1;
	}

	param.rc.i_lookahead = 0;
	param.rc.i_bitrate = 500;
	param.rc.i_vbv_max_bitrate = param.rc.i_bitrate;
	param.rc.i_vbv_buffer_size = param.rc.i_bitrate;
	param.rc.f_vbv_buffer_init = 0.7f;
	param.rc.b_mb_tree = 0;
	//	param.rc.i_qp_min = 2;
	//	param.rc.i_qp_max = 31;
	//	param.rc.f_qcompress = 0.5f;
	//	param.rc.i_qp_constant = 0;
	param.rc.i_rc_method = X264_RC_ABR;
	param.rc.f_rf_constant = 8.5f;
	param.rc.f_rf_constant_max = 20.0f;
	param.rc.f_rate_tolerance = 0.1f;
	param.rc.i_aq_mode = X264_AQ_AUTOVARIANCE;
	param.rc.f_aq_strength = 0.5f;
	param.b_repeat_headers = 0;
	//	param.b_annexb = 0;
#ifdef _DEBUG
	//	param.analyse.b_psnr = true;
	//	param.analyse.b_ssim = true;
	param.i_log_level = X264_LOG_INFO;
#endif

	x264_t* h264 = x264_encoder_open(&param);
	if (h264 == NULL)
	{
		printf("x264 open failed\n");
		return -1;
	}

	x264_picture_t pic_in;
	x264_picture_t pic_out;

	x264_picture_init(&pic_in);   // planes and strides are assigned manually below, so x264_picture_alloc is not needed here
	x264_picture_init(&pic_out);

	int bufferSize = width * height * 3 / 2;
	unsigned char* buffer = (unsigned char*)malloc(bufferSize);

	pic_in.img.i_csp = X264_CSP_I420;
	pic_in.img.i_plane = 3;
	pic_in.img.plane[0] = buffer;
	pic_in.img.plane[1] = buffer + width * height;
	pic_in.img.plane[2] = buffer + width * height * 5 / 4;
	pic_in.img.i_stride[0] = width;
	pic_in.img.i_stride[1] = width / 2;
	pic_in.img.i_stride[2] = width / 2;
	pic_in.i_pts = 0;

	int frameCount = 0;
	int encodeFrameCount = 0;
	int frameSize = 0;
	int i_nal = 0;
	x264_nal_t* p_nal = NULL;

	// Get the PPS, SPS, etc. from the encoder headers
	x264_encoder_headers(h264, &p_nal, &i_nal);

	for (int i = 0; i < i_nal; i++)
	{
		fwrite(p_nal[i].p_payload, 1, p_nal[i].i_payload, fout);
	}

	while (!feof(fin))
	{
		int bytes = fread(buffer, 1, bufferSize, fin);
		if (bytes != bufferSize)
		{
			break;
		}

		if (frameCount++ % 8 == 0)
		{
			pic_in.i_type = X264_TYPE_IDR;
		}
		else
		{
			pic_in.i_type = X264_TYPE_AUTO;
		}

		frameSize = x264_encoder_encode(h264, &p_nal, &i_nal, &pic_in, &pic_out);

		if (frameSize < 0)
		{
			printf("x264 encode failed\n");
			break;
		}

		if (frameSize > 0)
		{
			for (int i = 0; i < i_nal; i++)
			{
				fwrite(p_nal[i].p_payload, 1, p_nal[i].i_payload, fout);
			}

			encodeFrameCount++;
		}

		pic_in.i_pts++;
	}

	while ((frameSize = x264_encoder_encode(h264, &p_nal, &i_nal, NULL, &pic_out)) > 0)
	{
		for (int i = 0; i < i_nal; i++)
		{
			fwrite(p_nal[i].p_payload, 1, p_nal[i].i_payload, fout);
		}

		encodeFrameCount++;
	}

	free(buffer);
	x264_encoder_close(h264);
	fclose(fin);
	fclose(fout);

	printf("read video frame = %d\n", frameCount);
	printf("encode video frame = %d\n", encodeFrameCount);

	system("pause");
	return 0;
}
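Note: because b_repeat_headers = 0 above, the SPS/PPS appear only once at the start of the output file, so a muxer or player built on top of this example may want to know which encoded frames are keyframes (for instance to build a seek index). A hedged sketch of checking that right after x264_encoder_encode() inside the loop above, using the frameSize and pic_out variables from this example:

	if (frameSize > 0 && pic_out.b_keyframe)
	{
		/* an IDR begins here; a real muxer could record the current
		   output file offset as a seek point */
		printf("keyframe at pts %lld\n", (long long)pic_out.i_pts);
	}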
Example #18
int CLiveParser::Init(int nDataType)
{
	m_dateType = nDataType;

	if (m_pDataBuff == NULL)
		m_pDataBuff = new char[100 * 1024 * 1024];

	m_pX264Param = new x264_param_t;
	assert(m_pX264Param);

	//* Configure parameters
	 //* Use the default parameters
	 x264_param_default(m_pX264Param);
	 //* cpuFlags
	 m_pX264Param->i_threads  = X264_SYNC_LOOKAHEAD_AUTO;//* guarantees the lookahead buffer can be drained and reused without deadlocking.
	 //* video Properties
	 m_pX264Param->i_width   = 640; //* width of the image to encode.
	 m_pX264Param->i_height  = 480; //* height of the image to encode
	 m_pX264Param->i_frame_total = 0; //* total number of frames to encode; use 0 if unknown.
	 m_pX264Param->i_keyint_max = 100;
	 //* bitstream parameters
	 m_pX264Param->i_bframe  = 0;
	 m_pX264Param->b_open_gop  = 0;
	 //m_pX264Param->i_bframe_pyramid = 0;
	 //m_pX264Param->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;

	 //* aspect ratio; it has an effect, but not the one wanted here.
	 //m_pX264Param->vui.i_sar_width = 1080;
	 //m_pX264Param->vui.i_sar_height = 720;

	 //* Log
	 m_pX264Param->i_log_level  = X264_LOG_NONE;
	 //* Rate control Parameters
	 m_pX264Param->rc.i_bitrate = 1024;//* bitrate (in kbps)
	 m_pX264Param->rc.b_mb_tree = 0;
	 //* muxing parameters
	 m_pX264Param->i_fps_den  = 1; //* frame rate denominator
	 m_pX264Param->i_fps_num  = 25;//* frame rate numerator
	 m_pX264Param->i_timebase_den = m_pX264Param->i_fps_num;
	 m_pX264Param->i_timebase_num = m_pX264Param->i_fps_den;

	 //* Set the profile. Main profile is used here.
	 x264_param_apply_profile(m_pX264Param, x264_profile_names[2]);

	 //* Open the encoder handle. x264_encoder_parameters retrieves the parameters
	  //* passed to x264; x264_encoder_reconfig updates them later.
	  m_pX264Handle = x264_encoder_open(m_pX264Param);
	  assert(m_pX264Handle);

	  //* Get the SPS and PPS of the whole stream; the call can be skipped if they are not needed.
	  /*int iResult = x264_encoder_headers(m_pX264Handle, &m_pNals, &m_iNal);
	  assert(iResult >= 0);
	  // The SPS and PPS together are only 36 bytes here -- how can they be parsed out?
	  for (int i = 0; i < m_iNal; ++i)
	  {
	   switch (m_pNals[i].i_type)
	   {
	   case NAL_SPS:
	    break;
	   case  NAL_PPS:
	    break;
	   default:
	    break;
	   }
	  }*/

	 //* Get the maximum number of frames the encoder is allowed to buffer.
	 int iMaxFrames = x264_encoder_maximum_delayed_frames(m_pX264Handle);
	 iMaxFrames = iMaxFrames;

	  //* Objects needed for encoding.
	  m_iNal = 0;
	  m_pNals = NULL;
	  m_pPicIn = new x264_picture_t;
	  m_pPicOut = new x264_picture_t;

	  x264_picture_init(m_pPicOut);
	  x264_picture_alloc(m_pPicIn, X264_CSP_I420, m_pX264Param->i_width, m_pX264Param->i_height);
	  m_pPicIn->img.i_csp = X264_CSP_I420;
	  m_pPicIn->img.i_plane = 3;

	return J_OK;
}
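The commented-out block above asks how the SPS and PPS can be pulled out of what x264_encoder_headers() returns. A minimal sketch, assuming the same m_pX264Handle, m_pNals and m_iNal members and that <string.h> is available; the sps/pps buffers and length variables are illustrative names, not part of the original class:

	  unsigned char sps[128], pps[128];
	  int spsLen = 0, ppsLen = 0;
	  if (x264_encoder_headers(m_pX264Handle, &m_pNals, &m_iNal) >= 0)
	  {
	      for (int i = 0; i < m_iNal; ++i)
	      {
	          // p_payload points at the Annex-B start code; i_payload is the full NAL size
	          if (m_pNals[i].i_type == NAL_SPS && m_pNals[i].i_payload <= (int)sizeof(sps))
	          {
	              spsLen = m_pNals[i].i_payload;
	              memcpy(sps, m_pNals[i].p_payload, spsLen);
	          }
	          else if (m_pNals[i].i_type == NAL_PPS && m_pNals[i].i_payload <= (int)sizeof(pps))
	          {
	              ppsLen = m_pNals[i].i_payload;
	              memcpy(pps, m_pNals[i].p_payload, ppsLen);
	          }
	      }
	  }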
int BleX264Encoder::encode(unsigned char *rgbframe, mint64 pts, void *opaque)
{
    Q_UNUSED(pts);
    unsigned char *src_buf = rgbframe;
    x264_picture_init(m_pictureIn);

    m_pictureIn->img.i_csp = X264_CSP_I420;
    m_pictureIn->img.i_plane = 3;
    m_pictureIn->i_type = X264_TYPE_AUTO;
    m_pictureIn->i_qpplus1 = 0;

    // @note why is i_pts incremented by 1 for every frame?
    // because of the timebase configured above, one tick corresponds to one frame.
    m_pictureIn->i_pts = ++m_encoded_frames;
    m_pictureIn->opaque = opaque;

    m_pictureIn->img.plane[0] = src_buf;
    m_pictureIn->img.plane[1] = src_buf + m_x264Param->i_height * m_x264Param->i_width;
    m_pictureIn->img.plane[2] = src_buf + m_x264Param->i_height * m_x264Param->i_width * 5 / 4;
    m_pictureIn->img.i_stride[0] = m_x264Param->i_width;
    m_pictureIn->img.i_stride[1] = m_x264Param->i_width >> 1;
    m_pictureIn->img.i_stride[2] = m_x264Param->i_width >> 1;

    x264_picture_t picOut;
    int nalNum;
    x264_nal_t* nalOut;
    int len = x264_encoder_encode(m_x264Encoder, &nalOut, &nalNum, m_pictureIn, &picOut);
    if (len < 0) {
        log_error("x264 encode failed");
        return -1;
    }

    if (nalNum <= 0) {
        log_warn("frame delayed in encoder.");
        return -2;
    }

    if(!bFirstFrameProcessed && nalNum)
    {
        if(picOut.i_dts < 0)
            delayOffset = int(-picOut.i_dts);

        bFirstFrameProcessed = true;
    }

    float timeOffset = float(picOut.i_pts - picOut.i_dts) * BleAVQueue::instance()->timestampBuilder()->videoInternal();

    BleVideoPacket *pkt = dynamic_cast<BleVideoPacket *> (BleAVQueue::instance()->find_unencoded_video());
    BleAssert(pkt != NULL);

    MStream &body = pkt->data;
    unsigned char frameType;
    if (IS_X264_TYPE_I(picOut.i_type)) {
        frameType = 0x17;
    } else {
        frameType = 0x27;
    }

    body.write1Bytes(frameType);
    body.write1Bytes(0x01);
    body.write3Bytes((int)timeOffset);

    // NALU layout: a 4-byte size field followed by the payload
    // (total size per NALU = 4 bytes + payload size)
    // because b_repeat_headers = 0 in x264_param_t, the NAL units emitted here are
    // only IDR and SLICE (P or B frames), so the SPS and PPS must be obtained
    // before any frame is encoded.
    for (int i = 0; i < nalNum; ++i) {
        x264_nal_t &nal = nalOut[i];
        body.writeString((char*)nal.p_payload, nal.i_payload);
    }

    if (IS_X264_TYPE_I(picOut.i_type)) {
        log_trace("I");
    } else if (IS_X264_TYPE_B(picOut.i_type)) {
        log_trace("B");
    } else {
        log_trace("P");
    }

    BleAVQueue::instance()->update_packet(pkt);

    return 0;
}
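The encode() above writes each frame as an FLV AVC video tag body: a frame-type byte (0x17 for keyframes, 0x27 otherwise), AVCPacketType 0x01, a 3-byte composition time, then the length-prefixed NALUs. For such a stream to be decodable, an AVC sequence header (frame type 0x17, AVCPacketType 0x00, followed by an AVCDecoderConfigurationRecord built from the SPS and PPS) has to be sent first, which is what the "must get SPS PPS before encoding any frame" comment refers to. A hedged sketch of building that body; buildAvcSequenceHeader and its parameters are illustrative names, and sps/pps are assumed to be the raw NAL payloads without Annex-B start codes:

#include <string.h>

// Sketch: fill `out` with an FLV video tag body carrying the AVC sequence header.
// Returns the number of bytes written; `out` must hold at least spsLen + ppsLen + 16 bytes.
static int buildAvcSequenceHeader(const unsigned char *sps, int spsLen,
                                  const unsigned char *pps, int ppsLen,
                                  unsigned char *out)
{
    int n = 0;
    out[n++] = 0x17;                                     // keyframe + AVC codec id
    out[n++] = 0x00;                                     // AVCPacketType: sequence header
    out[n++] = 0x00; out[n++] = 0x00; out[n++] = 0x00;   // composition time = 0
    out[n++] = 0x01;                                     // configurationVersion
    out[n++] = sps[1];                                   // AVCProfileIndication
    out[n++] = sps[2];                                   // profile_compatibility
    out[n++] = sps[3];                                   // AVCLevelIndication
    out[n++] = 0xFF;                                     // 4-byte NALU length fields
    out[n++] = 0xE1;                                     // one SPS follows
    out[n++] = (unsigned char)((spsLen >> 8) & 0xFF);
    out[n++] = (unsigned char)(spsLen & 0xFF);
    memcpy(out + n, sps, spsLen); n += spsLen;
    out[n++] = 0x01;                                     // one PPS follows
    out[n++] = (unsigned char)((ppsLen >> 8) & 0xFF);
    out[n++] = (unsigned char)(ppsLen & 0xFF);
    memcpy(out + n, pps, ppsLen); n += ppsLen;
    return n;
}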
Example #20
int initEncoder(ENCODER_S* pcEncoder,int nColorSpaceType,  int nWidth, int nHeight)
{
	if (NULL == pcEncoder)	
	{
		printf("[%s,%d] pcEncoder == NULL\n", __func__, __LINE__);	
		return -1;
	}

	pcEncoder->m_psParam = (x264_param_t*)malloc(sizeof(x264_param_t));
	if (NULL == pcEncoder->m_psParam)		
	{
		fprintf(stderr, "[%s,%d], malloc x264_param_t failed:%s\n", __func__, __LINE__, strerror(errno));		
		return -1;
	}

	x264_param_default(pcEncoder->m_psParam);

	pcEncoder->m_psParam->i_width = nWidth;
	pcEncoder->m_psParam->i_height = nHeight;
	pcEncoder->m_psParam->i_csp = nColorSpaceType;
	pcEncoder->m_psParam->i_fps_den = 1;
	pcEncoder->m_psParam->i_fps_num = 25;
	
	if (0 != x264_param_apply_profile(pcEncoder->m_psParam,x264_profile_names[5]))
	{
		fprintf(stderr, "[%s,%d] x264_param_apply_profile failed:%s\n", __func__, __LINE__, strerror(errno));	
		unInitEncoder(pcEncoder);
		//if (NULL != pcEncoder->m_psParam)
		//{
		//	unInitEncoder(pcEncoder->m_psParam);		
		//	pcEncoder->m_psParam = NULL;
		//}
		return -1;
	}

	pcEncoder->m_psHandle = x264_encoder_open(pcEncoder->m_psParam);
	if (NULL == pcEncoder->m_psHandle)
	{
		fprintf(stderr, "[%s,%d]x264_encoder_open failed:%s\n", __func__, __LINE__,strerror(errno));		
		unInitEncoder(pcEncoder);
		return -1;
	}
	pcEncoder->m_cIsEncodeOpen = 1;

	pcEncoder->m_psPic = (x264_picture_t*)malloc(sizeof(x264_picture_t));
	if (NULL == pcEncoder->m_psPic)
	{
		fprintf(stderr, "[%s,%d] malloc x264_picture_t failed:%s", __func__, __LINE__, strerror(errno));		
		unInitEncoder(pcEncoder);
		return -1;
	}
	pcEncoder->m_cIsPicAlloc = 1;

	x264_picture_init(pcEncoder->m_psPic);
	if (0 != x264_picture_alloc(pcEncoder->m_psPic, nColorSpaceType, nWidth, nHeight))
	{
		fprintf(stderr, "[%s,%d] x264_picture_alloc failed:%s", __func__, __LINE__, strerror(errno));		
		unInitEncoder(pcEncoder);
		return -1;
	}

	pcEncoder->m_psPic->img.i_plane = 3;
	return 0;
}
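initEncoder() above calls unInitEncoder() on every error path, but that function is not shown in this example. A hedged sketch of what the matching cleanup could look like, releasing resources in roughly the reverse order of initialization and using the same ENCODER_S fields; this is an assumed reconstruction, not the original code:

void unInitEncoder(ENCODER_S* pcEncoder)
{
	if (NULL == pcEncoder)
		return;

	if (NULL != pcEncoder->m_psPic)
	{
		if (pcEncoder->m_cIsPicAlloc)
		{
			x264_picture_clean(pcEncoder->m_psPic);   // safe after x264_picture_init/alloc
			pcEncoder->m_cIsPicAlloc = 0;
		}
		free(pcEncoder->m_psPic);
		pcEncoder->m_psPic = NULL;
	}

	if (pcEncoder->m_cIsEncodeOpen && NULL != pcEncoder->m_psHandle)
	{
		x264_encoder_close(pcEncoder->m_psHandle);
		pcEncoder->m_psHandle = NULL;
		pcEncoder->m_cIsEncodeOpen = 0;
	}

	if (NULL != pcEncoder->m_psParam)
	{
		free(pcEncoder->m_psParam);
		pcEncoder->m_psParam = NULL;
	}
}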
Example #21
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    LPVOID nullBuff = NULL;

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    x264_picture_t *lastPic = NULL;
    x264_picture_t outPics[NUM_OUT_BUFFERS];
    DWORD outTimes[NUM_OUT_BUFFERS] = {0, 0, 0};

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
        x264_picture_init(&outPics[i]);

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].img.i_plane = 1;
        }
    }
    else
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
            x264_picture_alloc(&outPics[i], X264_CSP_I420, outputCX, outputCY);
    }

    //----------------------------------------
    // time/timestamp stuff

    LARGE_INTEGER clockFreq;
    QueryPerformanceFrequency(&clockFreq);

    bufferedTimes.Clear();
    ctsOffsets.Clear();

#ifdef USE_100NS_TIME
    QWORD streamTimeStart = GetQPCTime100NS(clockFreq.QuadPart);
    QWORD frameTime100ns = 10000000/fps;

    QWORD sleepTargetTime = 0;
    bool bWasLaggedFrame = false;
#else
    DWORD streamTimeStart = OSGetTime();
#endif
    totalStreamTime = 0;

    lastAudioTimestamp = 0;

    latestVideoTime = firstSceneTimestamp = GetQPCTimeMS(clockFreq.QuadPart);

    DWORD fpsTimeNumerator = 1000-(frameTime*fps);
    DWORD fpsTimeDenominator = fps;
    DWORD fpsTimeAdjust = 0;

    DWORD cfrTime = 0;
    DWORD cfrTimeAdjust = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    int numTotalDuplicatedFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
#ifdef USE_100NS_TIME
    double bpsTime = 0.0;
#else
    float bpsTime = 0.0f;
#endif
    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirst420Encode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD curStreamTime = 0, lastStreamTime, firstFrameTime = GetQPCTimeMS(clockFreq.QuadPart);
    lastStreamTime = firstFrameTime-frameTime;

    bool bFirstAudioPacket = true;

    while(bRunning)
    {
#ifdef USE_100NS_TIME
        QWORD renderStartTime = GetQPCTime100NS(clockFreq.QuadPart);

        if(sleepTargetTime == 0 || bWasLaggedFrame)
            sleepTargetTime = renderStartTime;
#else
        DWORD renderStartTime = OSGetTime();
        totalStreamTime = renderStartTime-streamTimeStart;

        DWORD frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;
        }
#endif

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        profileIn("frame");

#ifdef USE_100NS_TIME
        QWORD qwTime = renderStartTime/10000;
        latestVideoTime = qwTime;

        QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.0000001;

        //Log(TEXT("frameDelta: %f"), fSeconds);

        lastStreamTime = renderStartTime;
#else
        QWORD qwTime = GetQPCTimeMS(clockFreq.QuadPart);
        latestVideoTime = qwTime;

        QWORD frameDelta = qwTime-lastStreamTime;
        float fSeconds = float(frameDelta)*0.001f;

        //Log(TEXT("frameDelta: %llu"), frameDelta);

        lastStreamTime = qwTime;
#endif

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();
        bool bUpdateBPS = false;

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if(bRenderView)
        {
            Vect2 renderFrameSize = Vect2(float(renderFrameWidth), float(renderFrameHeight));

            SetRenderTarget(NULL);

            LoadVertexShader(mainVertexShader);
            LoadPixelShader(mainPixelShader);

            Ortho(0.0f, renderFrameSize.x, renderFrameSize.y, 0.0f, -100.0f, 100.0f);
            SetViewport(0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);

            if(bTransitioning)
            {
                BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
                if(renderFrameIn1To1Mode)
                    DrawSprite(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y);
                else
                    DrawSprite(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);
                BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
            }

            if(renderFrameIn1To1Mode)
                DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y);
            else
                DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);

            Ortho(0.0f, renderFrameSize.x, renderFrameSize.y, 0.0f, -100.0f, 100.0f);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                LoadVertexShader(solidVertexShader);
                LoadPixelShader(solidPixelShader);
                solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFFFF0000);

                if(renderFrameIn1To1Mode)
                    Ortho(0.0f, renderFrameSize.x * downscale, renderFrameSize.y * downscale, 0.0f, -100.0f, 100.0f);
                else
                    Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);

                if(scene)
                    scene->RenderSelections();
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, scaleSize.x, scaleSize.y);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, outputSize.x, outputSize.y);

        //------------------------------------

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        //------------------------------------
        // present/upload

        profileIn("video encoding and uploading");

        bool bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
                bEncode = false;
            else if(bFirstFrame)
            {
                firstFrameTime = qwTime;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        if(bEncode)
        {
            curStreamTime = qwTime-firstFrameTime;

            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirst420Encode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    x264_picture_t &prevPicOut = outPics[prevOutBuffer];
                    x264_picture_t &picOut = outPics[curOutBuffer];
                    x264_picture_t &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            outTimes[nextOutBuffer] = (DWORD)curStreamTime;

                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].pitch     = map.RowPitch;
                                convertInfo[i].output[0] = nextPicOut.img.plane[0];
                                convertInfo[i].output[1] = nextPicOut.img.plane[1];
                                convertInfo[i].output[2] = nextPicOut.img.plane[2];
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirst420Encode)
                                bFirst420Encode = bEncode = false;
                        }
                        else
                        {
                            outTimes[curOutBuffer] = (DWORD)curStreamTime;
                            Convert444to420((LPBYTE)map.pData, outputCX, map.RowPitch, outputCY, 0, outputCY, picOut.img.plane, SSE2Available());
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }
                    else
                    {
                        outTimes[curOutBuffer] = (DWORD)curStreamTime;

                        picOut.img.i_stride[0] = map.RowPitch;
                        picOut.img.plane[0]    = (uint8_t*)map.pData;
                    }
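                    //the 4:4:4 branch above is zero-copy: the picture stride is set to the mapped row pitch
                    //and plane[0] points directly at the staging memory, so the mapped texture is handed along
                    //(prevTexture is stored in frameInfo below) and presumably unmapped only after encoding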

                    if(bEncode)
                    {
                        DWORD curFrameTimestamp = outTimes[prevOutBuffer];
                        //Log(TEXT("curFrameTimestamp: %u"), curFrameTimestamp);

                        //------------------------------------

                        FrameProcessInfo frameInfo;
                        frameInfo.firstFrameTime = firstFrameTime;
                        frameInfo.prevTexture = prevTexture;

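                        //constant frame rate: emit exactly one picture per CFR interval, accumulating the
                        //fractional frame time with fpsTimeNumerator/fpsTimeDenominator.  whichever of the
                        //current or previous converted picture is closer in time (within half a frame) is
                        //submitted with i_pts = cfrTime; re-submitting the same picture counts as a duplicate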
                        if(bUseCFR)
                        {
                            while(cfrTime < curFrameTimestamp)
                            {
                                DWORD frameTimeAdjust = frameTime;
                                cfrTimeAdjust += fpsTimeNumerator;
                                if(cfrTimeAdjust > fpsTimeDenominator)
                                {
                                    cfrTimeAdjust -= fpsTimeDenominator;
                                    ++frameTimeAdjust;
                                }

                                DWORD halfTime = (frameTimeAdjust+1)/2;

                                x264_picture_t *nextPic = (curFrameTimestamp-cfrTime <= halfTime) ? &picOut : &prevPicOut;
                                //Log(TEXT("cfrTime: %u, time: %u"), cfrTime, curFrameTimestamp);

                                //these lines are just for counting duped frames
                                if(nextPic == lastPic)
                                    ++numTotalDuplicatedFrames;
                                else
                                    lastPic = nextPic;

                                frameInfo.picOut = nextPic;
                                frameInfo.picOut->i_pts = cfrTime;
                                frameInfo.frameTimestamp = cfrTime;
                                ProcessFrame(frameInfo);

                                cfrTime += frameTimeAdjust;

                                //Log(TEXT("cfrTime: %u, chi frame: %u"), cfrTime, (curFrameTimestamp-cfrTime <= halfTime));
                            }
                        }
                        else
                        {
                            picOut.i_pts = curFrameTimestamp;

                            frameInfo.picOut = &picOut;
                            frameInfo.frameTimestamp = curFrameTimestamp;

                            ProcessFrame(frameInfo);
                        }
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        profileOut;
        profileOut;

        //------------------------------------
        // get audio while sleeping or capturing
        //ReleaseSemaphore(hRequestAudioEvent, 1, NULL);

        //------------------------------------
        // frame sync

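        //two pacing modes: with USE_100NS_TIME a 100ns sleep target is advanced by one frame interval and
        //the thread sleeps until it (missing the target counts as a lagged frame); otherwise millisecond
        //timing sleeps away whatever remains of frameTimeAdjust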
#ifdef USE_100NS_TIME
        QWORD renderStopTime = GetQPCTime100NS(clockFreq.QuadPart);
        sleepTargetTime += frameTime100ns;

        if(bWasLaggedFrame = (sleepTargetTime <= renderStopTime))
            numLongFrames++;
        else
            SleepTo(clockFreq.QuadPart, sleepTargetTime);
#else
        DWORD renderStopTime = OSGetTime();
        DWORD totalTime = renderStopTime-renderStartTime;

        if(totalTime > frameTimeAdjust)
            numLongFrames++;
        else if(totalTime < frameTimeAdjust)
            OSSleep(frameTimeAdjust-totalTime);
#endif

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

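    //shutdown: ask each conversion thread to exit (kill flag + signal), close its event handles,
    //unmap the staging texture still mapped from the pipelined conversion, and free the x264
    //output pictures with x264_picture_clean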
    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirst420Encode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        for(int i=0; i<NUM_OUT_BUFFERS; i++)
            x264_picture_clean(&outPics[i]);
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
    if(bUseCFR)
        Log(TEXT("Total duplicated CFR frames: %d"), numTotalDuplicatedFrames);
}
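A minimal sketch, not taken from any of the projects above: the 4:4:4 branch in the OBS loop shows the same pattern most of these examples rely on -- call x264_picture_init() on a stack picture and point its planes at memory you already own, instead of calling x264_picture_alloc(). The function name encode_i420_frame, the caller-owned packed I420 buffer, and the assumption that the encoder was opened for width x height I420 input (even dimensions) are all illustrative, not from the sources above.

#include <stdint.h>
#include <x264.h>

/* hypothetical helper: feed one packed I420 frame that lives in caller-owned memory.
   assumes enc was opened with matching width/height and even dimensions. */
static int encode_i420_frame(x264_t *enc, uint8_t *yuv, int width, int height, int64_t pts)
{
    x264_picture_t pic_in, pic_out;
    x264_nal_t *nals = NULL;
    int num_nals = 0;

    x264_picture_init(&pic_in);              // zeroes the struct; no buffers are allocated here
    pic_in.img.i_csp   = X264_CSP_I420;
    pic_in.img.i_plane = 3;

    // point the three planes into the caller's packed I420 buffer
    pic_in.img.plane[0] = yuv;                              // Y
    pic_in.img.plane[1] = yuv + width * height;             // U
    pic_in.img.plane[2] = yuv + width * height * 5 / 4;     // V
    pic_in.img.i_stride[0] = width;
    pic_in.img.i_stride[1] = width / 2;
    pic_in.img.i_stride[2] = width / 2;

    pic_in.i_pts = pts;

    // >0: size in bytes of the encoded NAL units, 0: frame delayed, <0: error
    return x264_encoder_encode(enc, &nals, &num_nals, &pic_in, &pic_out);
}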