Example #1
File: x264lib.c  Project: svn2github/Xpra
int compress_image(struct x264lib_ctx *ctx, const uint8_t *in, int stride, uint8_t **out, int *outsz)
{
	if (!ctx->encoder || !ctx->rgb2yuv)
		return 1;

	x264_picture_t pic_in, pic_out;
	x264_picture_alloc(&pic_in, X264_CSP_I420, ctx->width, ctx->height);

	/* Colorspace conversion (RGB -> I420) */
	sws_scale(ctx->rgb2yuv, &in, &stride, 0, ctx->height, pic_in.img.plane, pic_in.img.i_stride);

	/* Encoding */
	pic_in.i_pts = 1;

	x264_nal_t* nals;
	int i_nals;
	int frame_size = x264_encoder_encode(ctx->encoder, &nals, &i_nals, &pic_in, &pic_out);
	if (frame_size >= 0) {
		/* Do not free that! */
		*out = nals[0].p_payload;
		*outsz = frame_size;
	} else {
		fprintf(stderr, "Problem\n");
		x264_picture_clean(&pic_in);
		return 2;
	}
  
	x264_picture_clean(&pic_in);
	return 0;
}
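A note on the "Do not free that!" line above: the payload returned in *out points into memory owned by the x264 encoder and stays valid only until the next call into the encoder (x264_encoder_encode / x264_encoder_close). A minimal caller sketch, assuming the ctx, rgb buffer and stride come from the surrounding x264lib.c code (not part of the original file; needs <stdlib.h> and <string.h>):

/* Hypothetical caller of compress_image() above. */
uint8_t *out = NULL;
int outsz = 0;
if (compress_image(ctx, rgb, stride, &out, &outsz) == 0) {
	/* Copy the NAL payload before the next encode call reuses it. */
	uint8_t *copy = malloc(outsz);
	if (copy) {
		memcpy(copy, out, outsz);
		/* ... hand 'copy' to the transport/muxing layer ... */
		free(copy);
	}
}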
Example #2
H264Encoder::~H264Encoder() {
    if (x264_hdl_ != NULL) {
        x264_picture_clean(&x264_picin_);
        x264_picture_clean(&x264_picout_);
        x264_encoder_close(x264_hdl_);
        x264_hdl_ = NULL;
    }
}
/**
 * When compress end, clear some resource
 */
jint
Java_com_livecamera_encoder_h264encoder_CompressEnd( JNIEnv* env, jobject thiz, jlong handle)
{
	Encoder *en = (Encoder*)handle;
	ALOGE("begin free!");
	if (en->picture) {
		ALOGE("begin free picture");
		x264_picture_clean(en->picture);
		free(en->picture);
		en->picture = 0;
	}

	ALOGE("after free picture");

	if (en->param) {
		free(en->param);
		en->param = 0;
	}

	ALOGE("after free param");

	if (en->handle) {
		x264_encoder_close(en->handle);
	}
	ALOGE("after close encoder");

	free(en);

	ALOGE("after free encoder");

	return 0;
}
Example #4
int encode_context_frame(EncodeContext context, const char * frameData, char ** nalData, int * dataSize) {
    x264_picture_t pictureOut, pictureIn;
    x264_picture_alloc(&pictureIn, X264_CSP_I420, context.width, context.height);
    int rgbStride = context.width * 3;
    sws_scale(context.converter, (const uint8_t **)&frameData, &rgbStride, 0, context.height,
              pictureIn.img.plane, pictureIn.img.i_stride);
    
    x264_nal_t * nals;
    int i_nals;
    if (DEBUG_ENABLED) printf("encode_context_frame: passing data to x264\n");
    int frameSize = x264_encoder_encode(context.encoder, &nals, &i_nals, &pictureIn, &pictureOut); // TODO: figure out if picture_out can be NULL
    x264_picture_clean(&pictureIn);
    if (frameSize <= 0) return -1;
    
    if (DEBUG_ENABLED) printf("encode_context_frame: joining the frames\n");
    
    int totalSize = 0;
    for (int i = 0; i < i_nals; i++) {
        totalSize += nals[i].i_payload;
    }
    
    char * returnData = (char *)malloc(totalSize);
    int offset = 0;
    for (int i = 0; i < i_nals; i++) {
        memcpy(&returnData[offset], nals[i].p_payload, nals[i].i_payload);
        offset += nals[i].i_payload;
    }
    
    *nalData = returnData;
    *dataSize = totalSize;
    
    return 0;
}
Example #5
_declspec(dllexport) void __cdecl FreeContext(struct TranscoderContext* ctx) {
	FreeEncoder(ctx->encoder);
	FreeFfmpeg(ctx->decoder_ctx);
	x264_picture_clean(&pic_in);
	free(bts);
	free(ctx);
}
Example #6
//compress end
void compress_end()
{
	struct encoder *en = h264_encoder;

	if (en->picture)
	{
		x264_picture_clean(en->picture);

		en->picture = NULL;
	}

	if (en->param)
	{
		free(en->param);
		en->param = NULL;
	}

	if (en->handle)
	{
		x264_encoder_close(en->handle);
	}

	free(en);
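	/* note: this only resets the local pointer; the global h264_encoder still points at freed memory */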
	en = NULL;
}
Example #7
int CX264Encoder::close()
{
	int ret = 0;
	if (h){
		x264_encoder_close(h);
		x264_picture_clean(&pic);
	}
	return ret;
}
Example #8
int H264EncWrapper::Destroy()
{
    x264_picture_clean( &m_pic );

    // TODO: clean m_h
    //x264_encoder_close(m_h); //?????
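    // Note: x264_encoder_close(m_h) is the call that releases the encoder handle;
    // it should be called exactly once, guarded by a NULL check on m_h
    // (see X264Encoder::Destroy further below for that pattern).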
    
   return 0;
}
Example #9
bool CH264Encoder::End()
{
    if (m_pic.img.plane[0])
    {
        x264_picture_clean(&m_pic);
    }

    if (m_pSwsContext)
    {
        sws_freeContext(m_pSwsContext);
        m_pSwsContext = NULL;
    }

    if (m_pFlv)
    {
        m_pFlv->WriteTailer();
    }

    if (m_pFlv)
    {
        delete m_pFlv;
        m_pFlv = NULL;
    }

    if (mux_buffer)
    {
        free(mux_buffer);
    }
    mux_buffer = NULL;
    mux_buffer_size = 0;

    if (m_px264)
    {
        x264_encoder_close(m_px264);
        m_px264 = NULL;
    }

    m_pOutDataSink = NULL;

#ifdef YUV_FILE
    if (pYUVFile)
    {
        fclose(pYUVFile);
        pYUVFile = NULL;
    }
#endif

#ifdef H264_FILE
    if (pH264File)
    {
        fclose(pH264File);
        pH264File = NULL;
    }
#endif

    return true;
}
Example #10
msx264::~msx264()
{
//* Clean up the picture buffers
	x264_picture_clean(pPicIn);
	x264_picture_clean(pPicOut);
//* Close the encoder handle
	x264_encoder_close(pX264Handle);
	pX264Handle = NULL;

	delete pPicIn;
	pPicIn = NULL;

	delete pPicOut;
	pPicOut = NULL;

    close(file);
	//delete params;
	//params = NULL;
}
Example #11
H264Exporter::~H264Exporter()
{
  // Flush encoder.
  while (x264_encoder_delayed_frames(_encoder)) {
    add_frame(nullptr);
  }
  x264_picture_clean(&_pic);
  x264_encoder_close(_encoder);
  _file.close();
}
Example #12
int X264Encoder::Destroy()
{
    if (m_h)
    {
        x264_picture_clean( &m_pic );

        x264_encoder_close(m_h);
    }

    return 0;
}
Example #13
void encoder_end(Encoder *encoder) 
{
	if (encoder->picture) {
		x264_picture_clean(encoder->picture);
		free(encoder->picture);
		encoder->picture = 0;
	}
	if (encoder->param) {
		free(encoder->param);
		encoder->param = 0;
	}
	if (encoder->handle) {
		x264_encoder_close(encoder->handle);
	}
	free(g_H264_Buf);
}
Example #14
File: capenc.c  Project: ricann/videoctd
void encode_close()
{
	x264_picture_clean(x264_encode.pic);
	x264_encoder_close(x264_encode.handle);

	if(h264_buf) {
		free(h264_buf);
		h264_buf = NULL;
	}

	if(x264_encode.para) {
		free(x264_encode.para);
		x264_encode.para = NULL;
	}

	if(x264_encode.nal) {
		free(x264_encode.nal);
		x264_encode.nal = NULL;
	}
}
Example #15
jint Java_h264_com_H264Encoder_CompressEnd(JNIEnv* env, jobject thiz,jlong handle)
{
	Encoder * en = (Encoder *) handle;
	if(en->picture)
	{
		x264_picture_clean(en->picture);
		free(en->picture);
		en->picture	= 0;
	}
	if(en->param)
	{
		free(en->param);
		en->param=0;
	}
	if(en->handle)
	{
		x264_encoder_close(en->handle);
	}
	free(en);
	return 0;
}
Example #16
void CX264VideoEncoder::StopEncoder (void)
{
#ifndef USE_OUR_YUV
  x264_picture_clean(&m_pic_input);
#endif
  x264_encoder_close(m_h);


  CHECK_AND_FREE(m_vopBuffer);
  CHECK_AND_FREE(m_YUV);
  CHECK_AND_FREE(m_nal_info);
#ifdef OUTPUT_RAW
  if (m_outfile) {
    fclose(m_outfile);
  }
#endif
  if (m_push != NULL) {
    delete m_push;
    m_push = NULL;
  }
	  
}
Example #17
static int delete_codec(quicktime_video_map_t *vtrack)
{
	quicktime_h264_codec_t *codec;
	int i;


	codec = ((quicktime_codec_t*)vtrack->codec)->priv;
	for(i = 0; i < codec->total_fields; i++)
	{
		if(codec->encode_initialized[i])
		{
			pthread_mutex_lock(&h264_lock);


			if(codec->pic[i])
			{
				x264_picture_clean(codec->pic[i]);
				free(codec->pic[i]);
			}

			if(codec->encoder[i])
			{
				x264_encoder_close(codec->encoder[i]);
			}

			pthread_mutex_unlock(&h264_lock);
		}
	}



	if(codec->temp_frame) free(codec->temp_frame);
	if(codec->work_buffer) free(codec->work_buffer);
	if(codec->decoder) quicktime_delete_ffmpeg(codec->decoder);


	free(codec);
	return 0;
}
Example #18
jint Java_com_H264_H264Encoder_CompressEnd(JNIEnv* env, jobject thiz,jlong handle)
{
	 Encoder * en = (Encoder *) handle;

	 while(x264_encoder_delayed_frames( en->handle )){
		 ibebug();
	    x264_picture_t pic_out;
	    int i_nal;
	    int i_frame_size = 0;

	    i_frame_size = x264_encoder_encode( en->handle, &(en->nal), &i_nal, 0, &pic_out );
	    if( i_frame_size )
	    {
	        i_frame_size = mp4_output.write_frame( opt->hout, en->nal[0].p_payload, i_frame_size, &pic_out );
	    }
	 }

	 if (en->picture)
	 {
		 x264_picture_clean(en->picture);
		 free(en->picture);
		 en->picture = 0;
	 }
	 if (en->param)
	 {
		 free(en->param);
		 en->param = 0;
	 }
	 if (en->handle)
	 {
		 x264_encoder_close(en->handle);
	 }
	 mp4_output.close_file(opt->hout,largest_pts,second_largest_pts);
	 free(en);
	 free(opt);
	 return 0;
} 
Example #19
void unInitEncoder(ENCODER_S* pcEncoder)
{
	if (NULL == pcEncoder)	
	{
		printf("[%s,%d] pcEncoder == NULL\n", __func__, __LINE__);	
		return ;
	}

	if (NULL != pcEncoder->m_psPic)
	{
		if (pcEncoder->m_cIsPicAlloc)
		{
			x264_picture_clean(pcEncoder->m_psPic);	
			pcEncoder->m_cIsPicAlloc = 0;
		}
		
		free(pcEncoder->m_psPic);		
		pcEncoder->m_psPic = NULL;
	}

	if (NULL != pcEncoder->m_psParam)
	{
		free(pcEncoder->m_psParam);		
		pcEncoder->m_psParam = NULL;
	}

//printf("789, pcEncoder=%p, pcEncoder->m_psHandle=%p\n",pcEncoder,pcEncoder->m_psHandle);

	if (NULL != pcEncoder->m_psHandle && pcEncoder->m_cIsEncodeOpen == 1)
	{
		x264_encoder_close(pcEncoder->m_psHandle);
		pcEncoder->m_psHandle = NULL;
		pcEncoder->m_cIsEncodeOpen = 0;
	}

}
Example #20
void close_x264_encoder(Encoder * enc)
{
	// free picture and set to null
	if(enc->picture)
	{
		x264_picture_clean(enc->picture); 
		free(enc->picture);
		enc->picture = 0 ;
	}

	// free param  
	if(enc->param)
	{
		free(enc->param);
		enc->param = 0 ; 
	}

	if(enc->handle)
	{
		x264_encoder_close(enc->handle); 
	}
	// x264_encoder_close() above already releases the handle, so it must not be free()'d here;
	// just drop the pointer.
	enc->handle = 0;
}
Example #21
int main(int argc, char* argv[]){
    x264_param_t param;
    x264_t *h = NULL;
    x264_picture_t pic_in;
    x264_picture_t pic_out;
    x264_nal_t *nal;
    uint8_t *data = NULL;
    int widthXheight = width * height;
    int frame_size = width * height * 1.5;
    int read_sum = 0, write_sum = 0;
    int frames = 0;
    int i, rnum, i_size;
    x264_nal_t* pNals = NULL;

    x264_param_default(&param);
    param.i_width = width;
    param.i_height = height;
    param.i_bframe = 3;
    param.i_fps_num = 25;
    param.i_fps_den = 1;
    param.b_vfr_input = 0;
    param.i_keyint_max = 250;
    param.rc.i_bitrate = 1500;
    param.i_scenecut_threshold = 40;
    param.i_level_idc = 51;

    x264_param_apply_profile(&param, "high");

    h = x264_encoder_open( &param );

//    printf("param.rc.i_qp_min=%d, param.rc.i_qp_max=%d, param.rc.i_qp_step=%d param.rc.i_qp_constant=%d param.rc.i_rc_method=%d\n",
//            param.rc.i_qp_min, param.rc.i_qp_max, param.rc.i_qp_step, param.rc.i_qp_constant, param.rc.i_rc_method);
    printf("param:%s\n", x264_param2string(&param, 1));


    x264_picture_init( &pic_in );
    x264_picture_alloc(&pic_in, X264_CSP_YV12, width, height);
    pic_in.img.i_csp = X264_CSP_YV12;
    pic_in.img.i_plane = 3;

    data = (uint8_t*)malloc(0x400000);

    FILE* fpr = fopen(MFILE ".yuv", "rb");
    FILE* fpw1 = fopen(MFILE".szhu.h264", "wb");
//    FILE* fpw2 = fopen(MFILE".h264", "wb");

    if(!fpr || !fpw1 ) {
        printf("file open failed\n");
        return -1;
    }

    while(!feof(fpr)){
        rnum = fread(data, 1, frame_size, fpr);
        if(rnum != frame_size){
            printf("read file failed\n");
            break;
        }
        memcpy(pic_in.img.plane[0], data, widthXheight);
        memcpy(pic_in.img.plane[1], data + widthXheight, widthXheight >> 2);
        memcpy(pic_in.img.plane[2], data + widthXheight + (widthXheight >> 2), widthXheight >> 2);
        read_sum += rnum;
        frames ++;
//        printf("read frames=%d %.2fMB write:%.2fMB\n", frames, read_sum * 1.0 / 0x100000, write_sum * 1.0 / 0x100000);
        int i_nal;
        int i_frame_size = 0;

        if(0 && frames % 12 == 0){
            pic_in.i_type = X264_TYPE_I;
        }else{
            pic_in.i_type = X264_TYPE_AUTO;
        }
        i_frame_size = x264_encoder_encode( h, &nal, &i_nal, &pic_in, &pic_out );

        if(i_frame_size <= 0){
            //printf("\t!!!FAILED encode frame \n");
        }else{
            fwrite(nal[0].p_payload, 1, i_frame_size, fpw1);
//            printf("\t+++i_frame_size=%d\n", i_frame_size);
            write_sum += i_frame_size;
        }
#if 0
        for(i = 0; i < i_nal; i ++){
            i_size = nal[i].i_payload;
//            fwrite(nal[i].p_payload, 1, nal[i].i_payload, fpw1);
            fwrite(nal[i].p_payload, 1, i_frame_size, fpw1);
            x264_nal_encode(h, data, &nal[i]);
            if(i_size != nal[i].i_payload){
                printf("\t\ti_size=%d nal[i].i_payload=%d\n", i_size, nal[i].i_payload);
            }
            fwrite(data, 1, nal[i].i_payload, fpw2);
        }
#endif
    }

    free(data);
    x264_picture_clean(&pic_in);
    /* pic_out is filled by the encoder and was never x264_picture_alloc()'d,
       so it must not be passed to x264_picture_clean(). */
    if(h){
        x264_encoder_close(h);
        h = NULL;
    }
    fclose(fpw1);
//    fclose(fpw2);
    fclose(fpr);
    printf("h=0x%X", h);
    return 0;
}
Example #22
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    if(bUsingQSV)
        bUsing444 = false;

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];
    DWORD outTimes[NUM_OUT_BUFFERS] = {0, 0, 0};

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //----------------------------------------
    // time/timestamp stuff

    bufferedTimes.Clear();
    ctsOffsets.Clear();
    int bufferedFrames = 1; //to avoid constantly polling number of frames

#ifdef USE_100NS_TIME
    QWORD streamTimeStart = GetQPCTime100NS();
    QWORD frameTime100ns = 10000000/fps;

    QWORD sleepTargetTime = 0;
    bool bWasLaggedFrame = false;
#else
    DWORD streamTimeStart = OSGetTime();
    DWORD fpsTimeAdjust = 0;
#endif
    totalStreamTime = 0;

    lastAudioTimestamp = 0;

    latestVideoTime = firstSceneTimestamp = GetQPCTimeMS();

    DWORD fpsTimeNumerator = 1000-(frameTime*fps);
    DWORD fpsTimeDenominator = fps;
    DWORD cfrTime = 0;
    DWORD cfrTimeAdjust = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    int numTotalDuplicatedFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
#ifdef USE_100NS_TIME
    double bpsTime = 0.0;
#else
    float bpsTime = 0.0f;
#endif
    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirst420Encode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD curStreamTime = 0, lastStreamTime, firstFrameTime = GetQPCTimeMS();

#ifdef USE_100NS_TIME
    lastStreamTime = GetQPCTime100NS()-frameTime100ns;
#else
    lastStreamTime = firstFrameTime-frameTime;
#endif

    //bool bFirstAudioPacket = true;

    List<ProfilerNode> threadedProfilers;
    bool bUsingThreadedProfilers = false;

    while(bRunning || bufferedFrames)
    {
#ifdef USE_100NS_TIME
        QWORD renderStartTime = GetQPCTime100NS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/10000);

        if(sleepTargetTime == 0 || bWasLaggedFrame)
            sleepTargetTime = renderStartTime;
#else
        DWORD renderStartTime = OSGetTime();
        totalStreamTime = renderStartTime-streamTimeStart;

        DWORD frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;
        }
#endif

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        profileIn("frame");

#ifdef USE_100NS_TIME
        QWORD qwTime = renderStartTime/10000;
        latestVideoTime = qwTime;

        QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.0000001;

        //Log(TEXT("frameDelta: %f"), fSeconds);

        lastStreamTime = renderStartTime;
#else
        QWORD qwTime = GetQPCTimeMS();
        latestVideoTime = qwTime;

        QWORD frameDelta = qwTime-lastStreamTime;
        float fSeconds = float(frameDelta)*0.001f;

        //Log(TEXT("frameDelta: %llu"), frameDelta);

        lastStreamTime = qwTime;
#endif

        bool bUpdateBPS = false;
        profileIn("frame preprocessing and rendering");

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);

            LoadVertexShader(mainVertexShader);
            LoadPixelShader(mainPixelShader);

            Ortho(0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y, 0.0f, -100.0f, 100.0f);
            if(renderFrameCtrlSize.x != oldRenderFrameCtrlWidth || renderFrameCtrlSize.y != oldRenderFrameCtrlHeight)
            {
                // User is drag resizing the window. We don't recreate the swap chains so our coordinates are wrong
                SetViewport(0.0f, 0.0f, (float)oldRenderFrameCtrlWidth, (float)oldRenderFrameCtrlHeight);
            }
            else
                SetViewport(0.0f, 0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y);

            // Draw background (Black if fullscreen, window colour otherwise)
            if(bFullscreenMode)
                ClearColorBuffer(0x000000);
            else
                ClearColorBuffer(GetSysColor(COLOR_BTNFACE));

            if(bTransitioning)
            {
                BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
                DrawSprite(transitionTexture, 0xFFFFFFFF,
                        renderFrameOffset.x, renderFrameOffset.y,
                        renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);
                BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
            }

            DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF,
                    renderFrameOffset.x, renderFrameOffset.y,
                    renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene) {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        profileOut;

        //------------------------------------
        // present/upload

        profileIn("video encoding and uploading");

        bool bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (qwTime-firstFrameTime > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical); 
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTime = qwTime;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        if(bEncode)
        {
            curStreamTime = qwTime-firstFrameTime;

            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirst420Encode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            outTimes[nextOutBuffer] = (DWORD)curStreamTime;

                            bool firstRun = threadedProfilers.Num() == 0;
                            if(firstRun)
                                threadedProfilers.SetSize(numThreads);

                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].inPitch   = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch  = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
                                }
                                if(!firstRun)
                                    threadedProfilers[i].~ProfilerNode();
                                ::new (&threadedProfilers[i]) ProfilerNode(TEXT("Convert444Threads"), true);
                                threadedProfilers[i].MonitorThread(h420Threads[i]);
                                bUsingThreadedProfilers = true;
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirst420Encode)
                                bFirst420Encode = bEncode = false;
                        }
                        else
                        {
                            outTimes[curOutBuffer] = (DWORD)curStreamTime;
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }
                    else
                    {
                        outTimes[curOutBuffer] = (DWORD)curStreamTime;

                        picOut.picOut->img.i_stride[0] = map.RowPitch;
                        picOut.picOut->img.plane[0]    = (uint8_t*)map.pData;
                    }

                    if(bEncode)
                    {
                        DWORD curFrameTimestamp = outTimes[prevOutBuffer];
                        //Log(TEXT("curFrameTimestamp: %u"), curFrameTimestamp);

                        //------------------------------------

                        FrameProcessInfo frameInfo;
                        frameInfo.firstFrameTime = firstFrameTime;
                        frameInfo.prevTexture = prevTexture;

                        if(bDupeFrames)
                        {
                            while(cfrTime < curFrameTimestamp)
                            {
                                DWORD frameTimeAdjust = frameTime;
                                cfrTimeAdjust += fpsTimeNumerator;
                                if(cfrTimeAdjust > fpsTimeDenominator)
                                {
                                    cfrTimeAdjust -= fpsTimeDenominator;
                                    ++frameTimeAdjust;
                                }

                                DWORD halfTime = (frameTimeAdjust+1)/2;

                                EncoderPicture &nextPic = (curFrameTimestamp-cfrTime <= halfTime) ? picOut : prevPicOut;
                                //Log(TEXT("cfrTime: %u, time: %u"), cfrTime, curFrameTimestamp);

                                //these lines are just for counting duped frames
                                if(nextPic == lastPic)
                                    ++numTotalDuplicatedFrames;
                                else
                                    lastPic = nextPic;

                                frameInfo.pic = &nextPic;
                                if(bUsingQSV)
                                    frameInfo.pic->mfxOut->Data.TimeStamp = cfrTime;
                                else
                                    frameInfo.pic->picOut->i_pts = cfrTime;
                                frameInfo.frameTimestamp = cfrTime;
                                ProcessFrame(frameInfo);

                                cfrTime += frameTimeAdjust;

                                //Log(TEXT("cfrTime: %u, chi frame: %u"), cfrTime, (curFrameTimestamp-cfrTime <= halfTime));
                            }
                        }
                        else
                        {
                            if(bUsingQSV)
                                picOut.mfxOut->Data.TimeStamp = curFrameTimestamp;
                            else
                                picOut.picOut->i_pts = curFrameTimestamp;

                            frameInfo.pic = &picOut;
                            frameInfo.frameTimestamp = curFrameTimestamp;

                            ProcessFrame(frameInfo);
                        }

                        if (!bRunning)
                            bufferedFrames = videoEncoder->GetBufferedFrames ();
                    }

                    if(bUsing444)
                    {
                        prevTexture->Unmap(0);
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream)
            {
                if (curStrain > 25)
                {
                    if (qwTime - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo (FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = qwTime;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (qwTime - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        bUpdateBPS = true;

                        lastAdjustmentTime = qwTime;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;

        profileOut; //video encoding and uploading
        profileOut; //frame

        //------------------------------------
        // frame sync

#ifdef USE_100NS_TIME
        QWORD renderStopTime = GetQPCTime100NS();
        sleepTargetTime += frameTime100ns;

        if(bWasLaggedFrame = (sleepTargetTime <= renderStopTime))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }
        else
            SleepTo(sleepTargetTime);
#else
        DWORD renderStopTime = OSGetTime();
        DWORD totalTime = renderStopTime-renderStartTime;

        if(totalTime > frameTimeAdjust)
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }
        else if(totalTime < frameTimeAdjust)
            OSSleep(frameTimeAdjust-totalTime);
#endif

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);
                    if(bUsingThreadedProfilers)
                        threadedProfilers[i].~ProfilerNode();

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirst420Encode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
    if(bDupeFrames)
        Log(TEXT("Total duplicated frames: %d (%0.2f%%)"), numTotalDuplicatedFrames, (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0);
}
Example #23
void EncoderVideoSource::H264_doGetNextFrame()
{
#ifdef SDKH264 
	int size = (fWidth*fHeight *3/2);
	int videoType;

	Debug(ckite_log_message, "EncoderVideoSource::H264_doGetNextFrame ENTRY\n");
	Debug(ckite_log_message, "fMaxSize = %d\n", fMaxSize);
	if (fp == NULL)
	{
		Debug(ckite_log_message, "video fp is NULL\n");
		return;
	}
	// handle per of nal
	for(int i = 0; i < 4; i++)
	{
		if(more_nal[i] != NULL)
		{
			Debug(ckite_log_message, "more_nal address %p\n", more_nal[i]);
			Debug(ckite_log_message, "more_nal len  %d\n", more_nal_len[i]);
			memcpy(fTo, more_nal[i], more_nal_len[i]);
			fFrameSize = more_nal_len[i];
			if(more_nal[i] != NULL)
			{
				delete [] more_nal[i];
				more_nal[i] = NULL;
				more_nal_len[i] = 0;
			}
			fPictureEndMarker = True;
			afterGetting(this);
			return ;
		}
	}
	computePresentationTime();
	if (strcmp(mediaType, "store") == 0)	
	{
		if (fWidth == 720 && fHeight == 576)
		{
			videoType = getLivehdFrame();
		}
		else
		{
//			getFileVideoFrame( fp, 0, (unsigned char *)fBuffer, &size, &videoType, false);
		}
	}
	else
	{
		if (fWidth == 720 && fHeight == 576)
		{
			videoType = getLivehdFrame();
		}
		else
		{
			videoGetFrameInfo(fChannel, fp, mediaType, fBuffer, &size, &videoType);
		}
	}
	if(size <= 0) return ;
	if (videoType == VIDEO_MPEG4 || videoType == VIDEO_H264)
	{
		fFrameSize = size;
	}
	else if (videoType == VIDEO_RAW)
	{
		if( x264_picture_alloc(&m_pic, m_param.i_csp, m_param.i_width, m_param.i_height) < 0)
		{
			Debug(ckite_log_message, "x264_picture_alloc is failed \n");
			return;
		}
		memcpy(m_pic.img.plane[0], fBuffer, m_param.i_width * m_param.i_height);
		memcpy(m_pic.img.plane[1], fBuffer + m_param.i_width * m_param.i_height, m_param.i_width * m_param.i_height / 4);
		memcpy(m_pic.img.plane[2], fBuffer + m_param.i_width * m_param.i_height * 5 / 4, m_param.i_width * m_param.i_height / 4);   

		static x264_picture_t pic_out;
		x264_nal_t *nal = NULL;
		int i_nal = 0, i;

		if(x264_handle != NULL)
		{
			if( x264_encoder_encode( x264_handle, &nal, &i_nal, &m_pic, &pic_out ) < 0 )
			{
				x264_picture_clean(&m_pic);	/* release the picture allocated above before bailing out */
				return;
			}
		}
		int offset = 0;
		static int t = 0;
		FILE *fout;

		//unsigned char nal_type;
		Debug(ckite_log_message, "i_nal = %d\n", i_nal);
		for ( i = 0; i < i_nal; i++ )
		{
			if (t < 4)
			{
				char name[100] = {0};
				t++;
				snprintf(name, sizeof name, "nal%d.dat", t);
				fout = fopen(name, "wb+");
				size = fwrite(nal[i].p_payload,1,nal[i].i_payload,fout);
				fclose(fout);
				Debug(ckite_log_message, "size = %d\n",size);

			}
			if(nal[i].p_payload[2] == 1)
			{
				offset = 3;
				//nal_type = nal[i].p_payload[3];
			}
			else if (nal[i].p_payload[3] == 1)
			{
				offset = 4;
				//nal_type = nal[i].p_payload[4];
			}
			if(i >= 1)
			{
				if(more_nal[i-1] == NULL)
				{
					more_nal_len[i-1] = nal[i].i_payload - offset;
					more_nal[i-1] = new char [more_nal_len[i-1] + 1];
					if (more_nal[i-1] != NULL)
					{
						memset(more_nal[i-1], 0x0, nal[i].i_payload - offset + 1);
						memcpy(more_nal[i-1], nal[i].p_payload + offset, nal[i].i_payload - offset);
						//Debug(ckite_log_message, "new sucess more_nal[%d], nal size %d\n", i-1, more_nal_len[i-1]);
					}
					else
					{
						Debug(ckite_log_message, "new failed with %d nal\n", i);
					}
				}
			}
			else 
			{
				memcpy(fTo, nal[i].p_payload + offset, nal[i].i_payload - offset);
				fFrameSize = nal[i].i_payload - offset;
			}
		}
	}
	//Debug(ckite_log_message, "Deliver nal type %d with %d bytes.\n", nal_type, fFrameSize);
	fPictureEndMarker = True;
	afterGetting(this);
	x264_picture_clean(&m_pic);
#endif

}
Example #24
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hMatrix   = yuvScalePixelShader->GetParameterByName(TEXT("yuvMat"));
    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    bUsing444 = false;

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //std::unique_ptr<ProfilerNode> encodeThreadProfiler;

    //----------------------------------------
    // time/timestamp stuff

    bool bWasLaggedFrame = false;

    totalStreamTime = 0;
    lastAudioTimestamp = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
    double bpsTime = 0.0;

    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;
        convertInfo[i].numThreads = numThreads;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bEncode;
    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirstEncode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD streamTimeStart  = GetQPCTimeNS();
    QWORD lastStreamTime   = 0;
    QWORD firstFrameTimeMS = streamTimeStart/1000000;
    QWORD frameLengthNS    = 1000000000/fps;

    while(WaitForSingleObject(hVideoEvent, INFINITE) == WAIT_OBJECT_0)
    {
        if (bShutdownVideoThread)
            break;

        QWORD renderStartTime = GetQPCTimeNS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/1000000);

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        QWORD renderStartTimeMS = renderStartTime/1000000;

        QWORD curStreamTime = latestVideoTimeNS;
        if (!lastStreamTime)
            lastStreamTime = curStreamTime-frameLengthNS;
        QWORD frameDelta = curStreamTime-lastStreamTime;
        //if (!lastStreamTime)
        //    lastStreamTime = renderStartTime-frameLengthNS;
        //QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.000000001;
        //lastStreamTime = renderStartTime;

        bool bUpdateBPS = false;

        profileIn("video thread frame");

        //Log(TEXT("Stream Time: %llu"), curStreamTime);
        //Log(TEXT("frameDelta: %lf"), fSeconds);

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!pushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if (bPleaseEnableProjector)
            ActuallyEnableProjector();
        else if(bPleaseDisableProjector)
            DisableProjector();

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

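        //update bandwidth statistics roughly once per second, averaging over up to the last 3 seconds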
        QWORD curBytesSent = 0;
        
        if (network) {
            curBytesSent = network->GetCurrentSentBytes();
            curFramesDropped = network->NumDroppedFrames();
        } else if (numSecondsWaited) {
            //reset stats if the network disappears
            bytesPerSec = 0;
            bpsTime = 0;
            numSecondsWaited = 0;
            curBytesSent = 0;
            zero(lastBytesSent, sizeof(lastBytesSent));
        }

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        if(network) curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if (bProjector) {
            SetRenderTarget(projectorTexture);

            Vect2 renderFrameSize, renderFrameOffset;
            Vect2 projectorSize = Vect2(float(projectorWidth), float(projectorHeight));

            float projectorAspect = (projectorSize.x / projectorSize.y);
            float baseAspect = (baseSize.x / baseSize.y);

            if (projectorAspect < baseAspect) {
                float fProjectorWidth = float(projectorWidth);

                renderFrameSize   = Vect2(fProjectorWidth, fProjectorWidth / baseAspect);
                renderFrameOffset = Vect2(0.0f, (projectorSize.y-renderFrameSize.y) * 0.5f);
            } else {
                float fProjectorHeight = float(projectorHeight);

                renderFrameSize   = Vect2(fProjectorHeight * baseAspect, fProjectorHeight);
                renderFrameOffset = Vect2((projectorSize.x-renderFrameSize.x) * 0.5f, 0.0f);
            }

            DrawPreview(renderFrameSize, renderFrameOffset, projectorSize, curRenderTarget, Preview_Projector);

            SetRenderTarget(NULL);
        }

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);
            DrawPreview(renderFrameSize, renderFrameOffset, renderFrameCtrlSize, curRenderTarget,
                    bFullscreenMode ? Preview_Fullscreen : Preview_Standard);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene) {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

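        //pick the RGB -> YUV conversion matrix (full or limited range) for the YUV scaling shader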
        switch(colorDesc.matrix)
        {
        case ColorMatrix_GBR:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[0] : (float*)yuvMat[0]);
            break;
        case ColorMatrix_YCgCo:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[1] : (float*)yuvMat[1]);
            break;
        case ColorMatrix_BT2020NCL:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[2] : (float*)yuvMat[2]);
            break;
        case ColorMatrix_BT709:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[3] : (float*)yuvMat[3]);
            break;
        case ColorMatrix_SMPTE240M:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[4] : (float*)yuvMat[4]);
            break;
        default:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[5] : (float*)yuvMat[5]);
        }

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if (bProjector && !copyWait)
            projectorSwap->Present(0, 0);

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        //------------------------------------
        // present/upload

        profileIn("GPU download and conversion");

        bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (renderStartTimeMS-firstFrameTimeMS > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical); 
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTimestamp = lastStreamTime/1000000;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        lastStreamTime = curStreamTime;

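        //download the previously rendered YUV frame from the GPU and convert it for the encoder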
        if(bEncode)
        {
            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirstEncode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].inPitch   = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch  = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
                                }
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirstEncode)
                                bFirstEncode = bEncode = false;
                        }
                        else
                        {
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }

                    if(bEncode)
                    {
                        //encodeThreadProfiler.reset(::new ProfilerNode(TEXT("EncodeThread"), true));
                        //encodeThreadProfiler->MonitorThread(hEncodeThread);
                        curFramePic = &picOut;
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        message << TEXT(" This error can also occur if you have enabled opencl in x264 custom settings.");

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

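            //dynamic bitrate: back off when network strain is high, slowly restore towards the default once it clears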
            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream && totalStreamTime > 15000)
            {
                if (curStrain > 25)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo (FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        App->RemoveStreamInfo(adjustmentStreamId);
                        adjustmentStreamId = 0;

                        bUpdateBPS = true;

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;
        profileOut;
        profileOut; //frame

        //------------------------------------
        // frame sync

        //QWORD renderStopTime = GetQPCTimeNS();

        if(bWasLaggedFrame = (frameDelta > frameLengthNS))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    DisableProjector();

    //encodeThreadProfiler.reset();

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirstEncode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of late frames: %d (%0.2f%%) (it's okay for some frames to be late)"), numTotalFrames, numLongFrames, (numTotalFrames > 0) ? (double(numLongFrames)/double(numTotalFrames))*100.0 : 0.0f);
}
Example #25
0
//uninitialize the ctx structure
void uninit_ctx()
{

//    avpicture_free(&ctx.pic_src);
    x264_picture_clean(&ctx.pic_xsrc);
}
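
A minimal sketch of the initialization this cleanup pairs with (the ctx layout and the init_ctx name are hypothetical; only the x264 calls are real): every x264_picture_alloc needs exactly one matching x264_picture_clean, which is what uninit_ctx provides.

#include <x264.h>

/* hypothetical context mirroring the example above */
static struct {
    x264_picture_t pic_xsrc;
} ctx;

static int init_ctx(int width, int height)
{
    /* allocates the I420 plane buffers that uninit_ctx() releases via x264_picture_clean() */
    if (x264_picture_alloc(&ctx.pic_xsrc, X264_CSP_I420, width, height) < 0)
        return -1;
    return 0;
}
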
Example #26
0
int main(int argc, char** argv)
{
  int listenfd, connfd;
  int byterecv;
  int bytesum = 0;
  int ret = 0;
  struct sockaddr_in servaddr;
  char *buff;
  int width = WIDTH, height = HEIGHT;
  x264_param_t param;
  x264_picture_t pic;
  x264_picture_t pic_out;
  x264_t *h;
  int i_frame = 0;
  int i_frame_size;
  x264_nal_t *nal;
  int i_nal;
  FILE *fout = NULL;

   /* Get default params for preset/tuning */
  if(x264_param_default_preset( &param, "medium", NULL ) < 0)
    goto fail;

  /* Configure non-default params */
  param.i_csp = X264_CSP_I420;
  param.i_width  = WIDTH;
  param.i_height = HEIGHT;
  param.b_vfr_input = 0; //constant frame rate input (timing derived from fps, not per-frame timestamps)
  param.b_repeat_headers = 1;
  param.b_annexb = 1;

  /* Apply profile restrictions. */
  if(x264_param_apply_profile(&param, "baseline") < 0)
    goto fail;

  if(x264_picture_alloc(&pic, param.i_csp, param.i_width, param.i_height) < 0)
    goto fail;

#undef fail
#define fail fail2

  h = x264_encoder_open(&param);
  if(!h)
    goto fail;
  printf("x264 encoder init successfully\n");

  int luma_size = width * height;
  int chroma_size = luma_size / 4;
  fout = fopen("test.264", "wb+");

#undef fail
#define fail fail3

  /* socket init*/
  if((listenfd = socket(AF_INET, SOCK_STREAM, 0)) == -1){
    printf("create socket error: %s(errno: %d)\n",strerror(errno),errno);
    exit(0);
  }

  memset(&servaddr, 0, sizeof(servaddr));
  servaddr.sin_family = AF_INET;
  servaddr.sin_addr.s_addr = htonl(INADDR_ANY);
  servaddr.sin_port = htons(6666);

  if(bind(listenfd, (struct sockaddr*)&servaddr, sizeof(servaddr)) == -1){
    printf("bind socket error: %s(errno: %d)\n",strerror(errno),errno);
    exit(0);
  }

  if(listen(listenfd, 10) == -1){
    printf("listen socket error: %s(errno: %d)\n",strerror(errno),errno);
    exit(0);
  }

  printf("======waiting for client's request======\n");
  if((connfd = accept(listenfd, (struct sockaddr*)NULL, NULL)) == -1){
    printf("accept socket error: %s(errno: %d)",strerror(errno),errno);
    exit(0);
  }

  /* malloc a 5 MB buffer for raw YUV data received from the client */
  buff = (char *)malloc(5 * 1024 * 1024 * sizeof(char));
  while(1) {
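    /* accumulate received bytes; once a full YUV frame (RESOLUTION * 1.5 bytes) is buffered, encode it */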
    byterecv = recv(connfd, buff + bytesum, 32 * 1024, 0);
    if (byterecv > 0) {
      bytesum += byterecv;
      /* got one frame here, encode it*/
      if (bytesum >= RESOLUTION * 1.5) {
        /* point the picture planes at the receive buffer (replacing the planes x264_picture_alloc created) */
        pic.img.plane[0] = (uint8_t *)buff;
        pic.img.plane[1] = (uint8_t *)buff + luma_size;
        pic.img.plane[2] = (uint8_t *)buff + luma_size + chroma_size;
        pic.i_pts = i_frame;
        i_frame ++;

        i_frame_size = x264_encoder_encode(h, &nal, &i_nal, &pic, &pic_out);
        if(i_frame_size < 0)
          goto fail;
        else if(i_frame_size) {
          if(!fwrite( nal->p_payload, i_frame_size, 1, fout))
            goto fail;
          printf("encode frame %d\n", i_frame);
        }
        bytesum = bytesum - RESOLUTION * 1.5;
        memcpy(buff, buff + (int)(RESOLUTION * 1.5), bytesum);
      }
    }
    else if((byterecv < 0) && (errno == EAGAIN || errno == EINTR)) {
      printf("coutinue\n");
      continue;
    }
    else {
      //printf("stop recv stream frome client, bytesum: %d, total: %d\n", bytesum, total);
      break;
    }
  }

  /* Flush delayed frames */
  while(x264_encoder_delayed_frames(h)) {
    i_frame_size = x264_encoder_encode(h, &nal, &i_nal, NULL, &pic_out);
    if(i_frame_size < 0)
      goto fail;
    else if(i_frame_size) {
      if(!fwrite( nal->p_payload, i_frame_size, 1, fout))
        goto fail;
    }
  }

  if (fout)
    fclose(fout);

  x264_encoder_close(h);
  x264_picture_clean(&pic);

  close(connfd);
  close(listenfd);
  return 0;

#undef fail
fail3:
  x264_encoder_close(h);
fail2:
  x264_picture_clean(&pic);
fail:
  return -1;
}
Example #27
0
File: encode.cpp Project: sudokuhk/media
int main(int argc, char * argv[])
{
    int fps     = 15;
    int bitRate = 24;
    int width   = 640;
    int height  = 480;
    int yuvsize = (width * height * 3) >> 1;
    uint8_t * yuv = (uint8_t *)malloc(yuvsize);
    
    x264_picture_t *_picIn;
    x264_picture_t *_picOut;
    x264_t * x264_handle_        = NULL;
    x264_param_t * x264_param_   = (x264_param_t *)malloc(sizeof(x264_param_t));
    
    init_param(x264_param_, width, height, fps, bitRate);  
    
	_picIn = (x264_picture_t*)malloc(sizeof(x264_picture_t));
	_picOut = (x264_picture_t*)malloc(sizeof(x264_picture_t));
	
	if(x264_picture_alloc(_picIn, X264_CSP_I420, width, height) < 0 ) {
	    printf("x264_picture_alloc failed!\n");
	    exit(0);
	}
	
	_picIn->img.i_csp   = X264_CSP_I420;  
	_picIn->img.i_plane = 3; 

	x264_handle_ = x264_encoder_open(x264_param_);
	if(!x264_handle_) {
		printf("x264_encoder_open failed!\n");
		exit(0);
	}
	init_head(x264_handle_, NULL, 0);
	
	if (argc < 2) {
	    printf("please input yuv file name!\n");
	    exit(0);
	}
	const char * file = argv[1];
	printf("encode yuv file:%s\n", file);
	
	struct stat sb;
	if (stat(file, &sb) == -1) {
	    printf("stat file:%s failed!\n", file);
	    exit(0);
	}
	
	if ((sb.st_mode & S_IFMT) != S_IFREG || sb.st_size <  yuvsize) {
	    printf("file:%s invalid, size:%ld, regfile:%d!\n", file, sb.st_size, (sb.st_mode & S_IFMT) == S_IFREG);
	    exit(0);
	}
	
	int rfd = open(file, O_RDONLY);
	if (rfd < 0) {
	    printf("open file:%s failed!\n", file);
	    exit(0);
	}
	
	int off = 0, rdn = 0;
	if ((rdn = read(rfd, yuv, yuvsize - off)) != yuvsize) {
        printf("read no enough data. readn:%d, yuvsize:%d\n", rdn, yuvsize);
	} 
	close(rfd);
	
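	// copy the packed I420 buffer into the Y, U and V planes of the input picture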
	int planesize = width * height;
	memcpy(_picIn->img.plane[0], yuv, planesize);
	memcpy(_picIn->img.plane[1], yuv + planesize, planesize >> 2);
	memcpy(_picIn->img.plane[2], yuv + planesize + (planesize >> 2), planesize >> 2);
	
	x264_nal_t * nal;
	int nalcnt = 0;
	
	int encodesize = x264_encoder_encode(x264_handle_, &nal, &nalcnt, _picIn, _picOut);
	if (encodesize <= 0) {
	    printf("encode failed. size:%d\n", encodesize);
	    exit(0);
	}
	
	const char * outfile = "h264_640x480.bin";
	int wfd = open(outfile, O_CREAT | O_RDWR, S_IRUSR | S_IWUSR | S_IRGRP);
	if (wfd < 0) {
	    printf("open outfile:%s failed!\n", outfile);
	    exit(0);
	}
	write(wfd, nal->p_payload, encodesize);
	close(wfd);
	
	x264_encoder_close(x264_handle_);
	x264_picture_clean(_picIn);
	free(_picIn);
	free(_picOut);
	free(yuv);
	free(x264_param_);
	
	return 0;
}
Example #28
0
int main(int argc, char **argv) {

    int ret = 0;
    int y_size = 0;
    int i = 0;
    int j = 0;

    //Encode 50 frame
    //if set 0, encode all frame
    int frame_num = 50;
    int csp = X264_CSP_I420;
    int width = 640;
    int height = 360;

    int i_nal = 0;
    x264_nal_t *p_nals = NULL;
    x264_t *p_handle = NULL;
    x264_picture_t *p_pic_in = (x264_picture_t *) malloc(sizeof(x264_picture_t));
    x264_picture_t *p_pic_out = (x264_picture_t *) malloc(sizeof(x264_picture_t));
    x264_param_t *p_param = (x264_param_t *) malloc(sizeof(x264_param_t));

    //FILE* fp_src  = fopen("../cuc_ieschool_640x360_yuv444p.yuv", "rb");
    FILE *fp_src = fopen("../cuc_ieschool_640x360_yuv420p.yuv", "rb");

    FILE *fp_dst = fopen("cuc_ieschool.h264", "wb");

    //Check
    if ((NULL == fp_src) || (NULL == fp_dst)) {
        lwlog_err("Open files error.");
        return -1;
    }

    x264_param_default(p_param);
    p_param->i_width = width;
    p_param->i_height = height;
    /*
    //Param
    p_param->i_log_level  = X264_LOG_DEBUG;
    p_param->i_threads  = X264_SYNC_LOOKAHEAD_AUTO;
    p_param->i_frame_total = 0;
    p_param->i_keyint_max = 10;
    p_param->i_bframe  = 5;
    p_param->b_open_gop  = 0;
    p_param->i_bframe_pyramid = 0;
    p_param->rc.i_qp_constant=0;
    p_param->rc.i_qp_max=0;
    p_param->rc.i_qp_min=0;
    p_param->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;
    p_param->i_fps_den  = 1;
    p_param->i_fps_num  = 25;
    p_param->i_timebase_den = p_param->i_fps_num;
    p_param->i_timebase_num = p_param->i_fps_den;
    */
    p_param->i_csp = csp;
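    //x264_profile_names[5] resolves to "high444" in x264.h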
    x264_param_apply_profile(p_param, x264_profile_names[5]);

    p_handle = x264_encoder_open(p_param);

    x264_picture_init(p_pic_out);
    x264_picture_alloc(p_pic_in, csp, p_param->i_width, p_param->i_height);

    //ret = x264_encoder_headers(p_handle, &p_nals, &i_nal);

    y_size = p_param->i_width * p_param->i_height;
    //detect frame number
    if (0 == frame_num) {
        fseek(fp_src, 0, SEEK_END);
        switch (csp) {
        case X264_CSP_I444:
            frame_num = ftell(fp_src) / (y_size * 3);
            break;
        case X264_CSP_I420:
            frame_num = ftell(fp_src) / (y_size * 3 / 2);
            break;
        default:
            lwlog_err("Colorspace Not Support");
            return -1;
        }
        fseek(fp_src, 0, SEEK_SET);
    }

    //Loop to Encode
    for (i = 0; i < frame_num; ++i) {
        switch (csp) {
        case X264_CSP_I444: {
            fread(p_pic_in->img.plane[0], y_size, 1, fp_src);    //Y
            fread(p_pic_in->img.plane[1], y_size, 1, fp_src);    //U
            fread(p_pic_in->img.plane[2], y_size, 1, fp_src);    //V
            break;
        }
        case X264_CSP_I420: {
            fread(p_pic_in->img.plane[0], y_size, 1, fp_src);    //Y
            fread(p_pic_in->img.plane[1], y_size / 4, 1, fp_src);    //U
            fread(p_pic_in->img.plane[2], y_size / 4, 1, fp_src);    //V
            break;
        }
        default: {
            lwlog_err("Colorspace Not Support.\n");
            return -1;
        }
        }
        p_pic_in->i_pts = i;

        ret = x264_encoder_encode(p_handle, &p_nals, &i_nal, p_pic_in, p_pic_out);
        if (ret < 0) {
            lwlog_err("x264_encoder_encode error");
            return -1;
        }

        lwlog_info("Succeed encode frame: %5d\n", i);

        for (j = 0; j < i_nal; ++j) {
            fwrite(p_nals[j].p_payload, 1, p_nals[j].i_payload, fp_dst);
        }
    }
    i = 0;
    //flush encoder
    while (1) {
        ret = x264_encoder_encode(p_handle, &p_nals, &i_nal, NULL, p_pic_out);
        if (ret < 0) {
            lwlog_err("x264_encoder_encode error while flushing");
            return -1;
        }
        if (0 == ret) {
            break;
        }
        lwlog_info("Flush 1 frame.");
        for (j = 0; j < i_nal; ++j) {
            fwrite(p_nals[j].p_payload, 1, p_nals[j].i_payload, fp_dst);
        }
        ++i;
    }
    x264_picture_clean(p_pic_in);
    x264_encoder_close(p_handle);
    p_handle = NULL;

    free(p_pic_in);
    free(p_pic_out);
    free(p_param);

    fclose(fp_src);
    fclose(fp_dst);

    return 0;
}
Example #29
0
gpointer
x264_gtk_encode_encode (X264_Thread_Data *thread_data)
{
  GIOStatus       status;
  gsize           size;
  X264_Pipe_Data  pipe_data;
  x264_param_t   *param;
  x264_picture_t  pic;
  x264_t         *h;
  hnd_t           hin;
  hnd_t           hout;
  int             i_frame;
  int             i_frame_total;
  int64_t         i_start;
  int64_t         i_end;
  int64_t         i_file;
  int             i_frame_size;
  int             i_progress;
  int             err;

  g_print (_("encoding...\n"));
  param = thread_data->param;
  err = x264_set_drivers (thread_data->in_container, thread_data->out_container);
  if (err < 0) {
    GtkWidget *no_driver;
    no_driver = gtk_message_dialog_new (GTK_WINDOW(thread_data->dialog),
                                        GTK_DIALOG_DESTROY_WITH_PARENT,
                                        GTK_MESSAGE_ERROR,
                                        GTK_BUTTONS_CLOSE,
                                        (err == -2) ? _("Error: unknown output file type")
                                                    : _("Error: unknown input file type"));
    gtk_dialog_run (GTK_DIALOG (no_driver));
    gtk_widget_destroy (no_driver);
    return NULL;
  }

  if (p_open_infile (thread_data->file_input, &hin, param)) {
    fprintf( stderr, _("could not open input file '%s'\n"), thread_data->file_input );
    return NULL;
  }

  p_open_outfile ((char *)thread_data->file_output, &hout);

  i_frame_total = p_get_frame_total (hin );
  if (((i_frame_total == 0) || (param->i_frame_total < i_frame_total)) &&
      (param->i_frame_total > 0))
    i_frame_total = param->i_frame_total;
  param->i_frame_total = i_frame_total;

  if ((h = x264_encoder_open (param)) == NULL)
    {
      fprintf (stderr, _("x264_encoder_open failed\n"));
      p_close_infile (hin);
      p_close_outfile (hout);
      g_free (param);

      return NULL;
    }

  if (p_set_outfile_param (hout, param))
    {
      fprintf (stderr, _("can't set outfile param\n"));
      p_close_infile (hin);
      p_close_outfile (hout);
      g_free (param);

      return NULL;
    }

  /* Create a new pic */
  x264_picture_alloc (&pic, X264_CSP_I420, param->i_width, param->i_height );

  i_start = x264_mdate();

  /* Encode frames */
  for (i_frame = 0, i_file = 0, i_progress = 0;
       ((i_frame < i_frame_total) || (i_frame_total == 0)); )
    {
      if (p_read_frame (&pic, hin, i_frame))
        break;

      pic.i_pts = (int64_t)i_frame * param->i_fps_den;

      i_file += x264_encode_frame (h, hout, &pic);

      i_frame++;

      /* update status line (up to 1000 times per input file) */
      if (param->i_log_level < X264_LOG_DEBUG &&
          (i_frame_total ? i_frame * 1000 / i_frame_total > i_progress
           : i_frame % 10 == 0))
        {
          int64_t i_elapsed = x264_mdate () - i_start;

          if (i_frame_total)
            {
              pipe_data.frame = i_frame;
              pipe_data.frame_total = i_frame_total;
              pipe_data.file = i_file;
              pipe_data.elapsed = i_elapsed;
              status = g_io_channel_write_chars (thread_data->io_write,
                                                 (const gchar *)&pipe_data,
                                                 sizeof (X264_Pipe_Data),
                                                 &size, NULL);
              if (status != G_IO_STATUS_NORMAL) {
                g_print (_("Error ! %d %d %d\n"), status, (int)sizeof (X264_Pipe_Data), (int)size);
              }
              else {
                /* we force the GIOChannel to write to the pipeline */
                status = g_io_channel_flush (thread_data->io_write,
                                             NULL);
                if (status != G_IO_STATUS_NORMAL) {
                  g_print (_("Error ! %d\n"), status);
                }
              }
            }
        }
    }
  /* Flush delayed B-frames */
  do {
    i_file += i_frame_size = x264_encode_frame (h, hout, NULL);
  } while (i_frame_size);

  i_end = x264_mdate ();
  x264_picture_clean (&pic);
  x264_encoder_close (h);
  fprintf (stderr, "\n");

  p_close_infile (hin);
  p_close_outfile (hout);

  if (i_frame > 0) {
    double fps = (double)i_frame * (double)1000000 /
      (double)(i_end - i_start);

    fprintf (stderr, _("encoded %d frames, %.2f fps, %.2f kb/s\n"),
             i_frame, fps,
             (double) i_file * 8 * param->i_fps_num /
             ((double) param->i_fps_den * i_frame * 1000));
  }

  gtk_widget_set_sensitive (thread_data->end_button, TRUE);
  gtk_widget_hide (thread_data->button);
  return NULL;
}
Example #30
0
File: x264.c Project: clzhan/x264-vs2008
/*****************************************************************************
 * Encode:
 *****************************************************************************/
static int  Encode( x264_param_t *param, cli_opt_t *opt )
{
    x264_t *h;
    x264_picture_t pic;

    int     i_frame, i_frame_total;
    int64_t i_start, i_end;
    int64_t i_file;
    int     i_frame_size;
    int     i_progress;

    i_frame_total = p_get_frame_total( opt->hin );
    i_frame_total -= opt->i_seek;
    if( ( i_frame_total == 0 || param->i_frame_total < i_frame_total )
        && param->i_frame_total > 0 )
        i_frame_total = param->i_frame_total;
    param->i_frame_total = i_frame_total;

    if( ( h = x264_encoder_open( param ) ) == NULL )
    {
        fprintf( stderr, "x264_encoder_open failed\n" );
        p_close_infile( opt->hin );
        p_close_outfile( opt->hout );
        return -1;
    }

    if( p_set_outfile_param( opt->hout, param ) )
    {
        fprintf( stderr, "can't set outfile param\n" );
        p_close_infile( opt->hin );
        p_close_outfile( opt->hout );
        return -1;
    }

    /* Create a new pic */
    x264_picture_alloc( &pic, X264_CSP_I420, param->i_width, param->i_height );

    i_start = x264_mdate();

    /* Encode frames */
    for( i_frame = 0, i_file = 0, i_progress = 0;
         b_ctrl_c == 0 && (i_frame < i_frame_total || i_frame_total == 0); )
    {
        if( p_read_frame( &pic, opt->hin, i_frame + opt->i_seek ) )
            break;

        pic.i_pts = (int64_t)i_frame * param->i_fps_den;

        if( opt->qpfile )
            parse_qpfile( opt, &pic, i_frame + opt->i_seek );
        else
        {
            /* Do not force any parameters */
            pic.i_type = X264_TYPE_AUTO;
            pic.i_qpplus1 = 0;
        }

        i_file += Encode_frame( h, opt->hout, &pic );

        i_frame++;

        /* update status line (up to 1000 times per input file) */
        if( opt->b_progress && param->i_log_level < X264_LOG_DEBUG &&
            ( i_frame_total ? i_frame * 1000 / i_frame_total > i_progress : i_frame % 10 == 0 ) )
        {
            int64_t i_elapsed = x264_mdate() - i_start;
            double fps = i_elapsed > 0 ? i_frame * 1000000. / i_elapsed : 0;
            
            if( i_frame_total )
            {
                int eta = i_elapsed * (i_frame_total - i_frame) / ((int64_t)i_frame * 1000000);
                i_progress = i_frame * 1000 / i_frame_total;
                fprintf( stderr, "encoded frames: %d/%d (%.1f%%), %.2f fps, eta %d:%02d:%02d  \r",
                         i_frame, i_frame_total, (float)i_progress / 10, fps,
                         eta/3600, (eta/60)%60, eta%60 );
            }
            else
                fprintf( stderr, "encoded frames: %d, %.2f fps   \r", i_frame, fps );
            
            fflush( stderr ); // needed in windows
        }
    }
    /* Flush delayed B-frames */
    do
    {
        i_file += i_frame_size = Encode_frame( h, opt->hout, NULL );
    } while( i_frame_size );

    i_end = x264_mdate();
    x264_picture_clean( &pic );
    x264_encoder_close( h );
    fprintf( stderr, "\n" );

    if( b_ctrl_c )
        fprintf( stderr, "aborted at input frame %d\n", opt->i_seek + i_frame );

    p_close_infile( opt->hin );
    p_close_outfile( opt->hout );

    if( i_frame > 0 )
    {
        double fps = (double)i_frame * (double)1000000 /
                     (double)( i_end - i_start );

        fprintf( stderr, "encoded %d frames, %.2f fps, %.2f kb/s\n", i_frame, fps,
                 (double) i_file * 8 * param->i_fps_num /
                 ( (double) param->i_fps_den * i_frame * 1000 ) );
    }

    return 0;
}