Code Example #1
File: MyWindow.cpp  Project: dtbinh/bioGP
bool ProcessBuffer(string& buff, Eigen::VectorXd& motorAngle)
{
	bool ret = false;
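	// The checks below treat endPos as the index of the last byte of a frame terminator ("\t\n");
	// a complete frame is NUM_BYTES_PER_MOTOR * NUM_MOTORS payload bytes plus the 2-byte terminator.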
	size_t endPos = FindLastOf(buff, "\t\n", buff.size());
	if (endPos >= buff.size())
	{
		std::cout << "Warning! End symbol not found!" << std::endl;
		return false;
	}
	else if (endPos == NUM_BYTES_PER_MOTOR * NUM_MOTORS + 2 - 1)
	{
		ret = ProcessFrame(buff, motorAngle);
	}
	else if (endPos < NUM_BYTES_PER_MOTOR * NUM_MOTORS + 2 - 1)
	{
		std::cout << "End symbol too early!" << std::endl;
	}
	else
	{
		size_t newEndPos = FindLastOf(buff, "\t\n", endPos - 2);
		if (endPos - newEndPos == NUM_BYTES_PER_MOTOR * NUM_MOTORS + 2)
		{
			string frame = buff.substr(newEndPos + 1, endPos - newEndPos);
			ret = ProcessFrame(frame, motorAngle);
			endPos = newEndPos;
		}
		else
		{
			std::cout << "Unknow reason." << std::endl;
		}
	}
	buff = buff.substr(endPos + 1, buff.size());

	return ret;
}
Code Example #2
File: ApplicationImpl.cpp  Project: zenkovich/o2
	void Application::Launch()
	{
		ShowWindow(mHWnd, SW_SHOW);

		mLog->Out("Application launched!");

		OnStarted();
		onStarted.Invoke();
		o2Events.OnApplicationStarted();

		MSG msg;
		memset(&msg, 0, sizeof(msg));

		//mTimer->Reset();

		while (msg.message != WM_QUIT)
		{
			if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
			{
				TranslateMessage(&msg);
				DispatchMessage(&msg);
			}
			else
			{
				ProcessFrame();
			}
		}

		o2Events.OnApplicationClosing();
		OnClosing();
		onClosing.Invoke();
	}
Code Example #3
void CHolly_Theora_Video::AddFrame( float fDelta, Holly::ColorMode::ENUM colormode, void* pData, unsigned int iFlags )
{
	ProcessFrame( false );

	Movie()->Utility()->Convert( pData, Width(), Height(), colormode, Holly::ColorMode::YUV, m_Frame[0].data, m_Frame[1].data, m_Frame[2].data, iFlags );
	m_bFrameWaiting = true;
}
Code Example #4
PVideoFrame __stdcall EdgeMask::GetFrame(int n, IScriptEnvironment* env)
{
	PVideoFrame	src = child->GetFrame(n, env);
	PVideoFrame	dst = env->NewVideoFrame(vi);
	ProcessFrame(dst, src, NULL, NULL, false, env);
	return dst;
}
Code Example #5
void FFrameGrabberProtocol::Tick()
{
	TArray<FCapturedFrameData> CapturedFrames = FrameGrabber->GetCapturedFrames();

	for (FCapturedFrameData& Frame : CapturedFrames)
	{
		ProcessFrame(MoveTemp(Frame));
	}
}
Code Example #6
File: mainstate.c  Project: Ikem/app-template
Msg const *MainState_CaptureColor(MainState *me, Msg *msg)
{
	struct APPLICATION_STATE *pState;
	bool bCaptureColor;
	
	switch (msg->evt)
	{
	case ENTRY_EVT:
		data.ipc.state.enAppMode = APP_CAPTURE_COLOR;
		data.pCurRawImg = data.u8FrameBuffers[0];
		return 0;
	case FRAMESEQ_EVT:
		/* Sleep here for a short while in order not to violate the vertical
		 * blank time of the camera sensor when triggering a new image
		 * right after receiving the old one. This can be removed if some
		 * heavy calculations are done here. */
		usleep(1000);
		return 0;
	case FRAMEPAR_EVT:
		/* Process the image. */
		ProcessFrame(data.pCurRawImg);
		
		/* Timestamp the capture of the image. */
		data.ipc.state.imageTimeStamp = OscSupCycGet();
		data.ipc.state.bNewImageReady = TRUE;
		return 0;
	case IPC_GET_APP_STATE_EVT:
		/* Fill in the response and schedule an acknowledge for the request. */
		pState = (struct APPLICATION_STATE*)data.ipc.req.pAddr;
		memcpy(pState, &data.ipc.state, sizeof(struct APPLICATION_STATE));
		
		data.ipc.enReqState = REQ_STATE_ACK_PENDING;
		return 0;
	case IPC_GET_COLOR_IMG_EVT:
		/* Write out the image to the address space of the CGI. */
		memcpy(data.ipc.req.pAddr, data.u8ResultImage, sizeof(data.u8ResultImage));
		
		data.ipc.state.bNewImageReady = FALSE;
		
		/* Mark the request as executed, so it will be acknowledged later. */
		data.ipc.enReqState = REQ_STATE_ACK_PENDING;
		return 0;
	case IPC_SET_CAPTURE_MODE_EVT:
		/* Read the option from the address space of the CGI. */
		bCaptureColor = *((bool*)data.ipc.req.pAddr);
		if (bCaptureColor == FALSE)
		{
			/* Need to capture raw images from now on, this is done in the captureRaw state.  */
			STATE_TRAN(me, &me->captureRaw);
		}
		data.ipc.enReqState = REQ_STATE_ACK_PENDING;
		return 0;
	}
	return msg;
}
Code Example #7
File: melbanks.cpp  Project: alongwithyou/PhnRec
int MelBanks::GetFeatures(float *ret_features)
{
	if(input_vector == 0)
		return 0;                                  // we do not have input data

	// Copy one frame from the input buffer into the temporary one during the first pass through this function;
	// shift the temporary buffer left and copy the next 10 ms from the input buffer on all subsequent passes.
	if(first_frame)
	{
		sCopy(vector_size, frame, input_vector);   // input_vector -> frame

		// backup the frame
		sCopy(vector_size, frame_backup, frame);

		frame_pos = vector_size;
		input_vector_pos += vector_size;

		first_frame = false;
	}
	else
	{
		// restore the last frame and shift it 10 ms left 
		if(frame_pos == vector_size - step)
		{
			sCopy(vector_size, frame, frame_backup);  // frame_backup -> frame
			sShiftLeft(vector_size, frame, step);
		}

		int n_input = input_vector_len - input_vector_pos; 
		int n_output = vector_size - frame_pos;
		int n_process = (n_input < n_output ? n_input : n_output);
		sCopy(n_process, frame + frame_pos, input_vector + input_vector_pos);
		input_vector_pos += n_process;
		frame_pos += n_process;

		if(input_vector_pos > input_vector_len)
			input_vector = 0;
	}

	if(frame_pos == vector_size)
	{
		// backup the frame
		sCopy(vector_size, frame_backup, frame);

		frame_pos = vector_size - step;

		// perform parameterisation of one frame
		ProcessFrame(frame, ret_features);
		return 1;
	}

	return 0;
}
Code Example #8
bool CHolly_Theora_Video::End()
{
	ProcessFrame( true );

	th_encode_free( m_Encoder );
	m_Encoder = NULL;

	FreeFrame();

	return true;
}
Code Example #9
File: Walking Animation.cpp  Project: drew55555/games
void ComposeFrame()
{
  if(Timer->elapsed(last_time,200))
  {
    last_time=Timer->time();
    if(++current_frame>=4)current_frame=0;
  }

  ProcessFrame();

  glutPostRedisplay();
}
Code Example #10
void CRendering::RunL()
    {
    // The video driver has finished drawing the last frame on the screen.
    // You could initiate rendering of the next frame from here, but it would be slow,
    // since there is a delay between CDirectScreenBitmap::EndUpdate()
    // and the completion of the screen refresh by the video driver.
    if (iFrames++ <200)
    	{
    	ProcessFrame();
      	}
    else
    	{
    	CActiveScheduler::Stop();
    	}      
    }
Code Example #11
File: actionthread.cpp  Project: KDE/kipi-plugins
void ActionThread::processItem(int upperBound, MagickImage* const img, MagickImage* const imgNext, Action action)
{
/*
    // need to reimplement using appsrc plugin of gstreamer
    if(action == TYPE_IMAGE)
    {
        if(d->item->EffectName() == EFFECT_NONE)
            upperBound = 1;
    }
*/

    for (int n = 0; n < upperBound && d->running; n++)
    {
        Frame* const frm = getFrame(d->item, img, imgNext, n, action);
        ProcessFrame(frm);
        WriteFrame(frm);
        delete frm;
    }
}
Code Example #12
File: app.cpp  Project: quaikohc/dx11framework
void CApp::Run()
{
    MSG msg = {0};

    while(true)
    {
        if(PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
        {
            if (msg.message == WM_QUIT)
                break;

            TranslateMessage(&msg);
            DispatchMessage(&msg);
        }
        else
            ProcessFrame();
    }

    return;
}
Code Example #13
void CRetroPlayerVideo::Process()
{
  while (!m_bStop)
  {
    // 1s should be a good failsafe if the event isn't triggered (shouldn't happen)
    if (AbortableWait(m_frameEvent, 1000) == WAIT_INTERRUPTED)
      break;

    VideoFrame *frame = NULL;
    m_buffer.GetPacket(frame);
    if (!frame)
      continue;

    if (!ProcessFrame(*frame))
      break;
  }

  // Clean up
  if (m_swsContext)
    sws_freeContext(m_swsContext);
}
Code Example #14
File: OBSVideoCapture.cpp  Project: redflasher/OBS
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    LPVOID nullBuff = NULL;

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    x264_picture_t *lastPic = NULL;
    x264_picture_t outPics[NUM_OUT_BUFFERS];
    DWORD outTimes[NUM_OUT_BUFFERS] = {0, 0, 0};

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
        x264_picture_init(&outPics[i]);

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].img.i_plane = 1;
        }
    }
    else
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
            x264_picture_alloc(&outPics[i], X264_CSP_I420, outputCX, outputCY);
    }

    //----------------------------------------
    // time/timestamp stuff

    LARGE_INTEGER clockFreq;
    QueryPerformanceFrequency(&clockFreq);

    bufferedTimes.Clear();
    ctsOffsets.Clear();

#ifdef USE_100NS_TIME
    QWORD streamTimeStart = GetQPCTime100NS(clockFreq.QuadPart);
    QWORD frameTime100ns = 10000000/fps;

    QWORD sleepTargetTime = 0;
    bool bWasLaggedFrame = false;
#else
    DWORD streamTimeStart = OSGetTime();
#endif
    totalStreamTime = 0;

    lastAudioTimestamp = 0;

    latestVideoTime = firstSceneTimestamp = GetQPCTimeMS(clockFreq.QuadPart);

    DWORD fpsTimeNumerator = 1000-(frameTime*fps);
    DWORD fpsTimeDenominator = fps;
    DWORD fpsTimeAdjust = 0;

    DWORD cfrTime = 0;
    DWORD cfrTimeAdjust = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    int numTotalDuplicatedFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
#ifdef USE_100NS_TIME
    double bpsTime = 0.0;
#else
    float bpsTime = 0.0f;
#endif
    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirst420Encode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD curStreamTime = 0, lastStreamTime, firstFrameTime = GetQPCTimeMS(clockFreq.QuadPart);
    lastStreamTime = firstFrameTime-frameTime;

    bool bFirstAudioPacket = true;

    while(bRunning)
    {
#ifdef USE_100NS_TIME
        QWORD renderStartTime = GetQPCTime100NS(clockFreq.QuadPart);

        if(sleepTargetTime == 0 || bWasLaggedFrame)
            sleepTargetTime = renderStartTime;
#else
        DWORD renderStartTime = OSGetTime();
        totalStreamTime = renderStartTime-streamTimeStart;

        DWORD frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;
        }
#endif

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        profileIn("frame");

#ifdef USE_100NS_TIME
        QWORD qwTime = renderStartTime/10000;
        latestVideoTime = qwTime;

        QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.0000001;

        //Log(TEXT("frameDelta: %f"), fSeconds);

        lastStreamTime = renderStartTime;
#else
        QWORD qwTime = GetQPCTimeMS(clockFreq.QuadPart);
        latestVideoTime = qwTime;

        QWORD frameDelta = qwTime-lastStreamTime;
        float fSeconds = float(frameDelta)*0.001f;

        //Log(TEXT("frameDelta: %llu"), frameDelta);

        lastStreamTime = qwTime;
#endif

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();
        bool bUpdateBPS = false;

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if(bRenderView)
        {
            Vect2 renderFrameSize = Vect2(float(renderFrameWidth), float(renderFrameHeight));

            SetRenderTarget(NULL);

            LoadVertexShader(mainVertexShader);
            LoadPixelShader(mainPixelShader);

            Ortho(0.0f, renderFrameSize.x, renderFrameSize.y, 0.0f, -100.0f, 100.0f);
            SetViewport(0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);

            if(bTransitioning)
            {
                BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
                if(renderFrameIn1To1Mode)
                    DrawSprite(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y);
                else
                    DrawSprite(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);
                BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
            }

            if(renderFrameIn1To1Mode)
                DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y);
            else
                DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);

            Ortho(0.0f, renderFrameSize.x, renderFrameSize.y, 0.0f, -100.0f, 100.0f);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                LoadVertexShader(solidVertexShader);
                LoadPixelShader(solidPixelShader);
                solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFFFF0000);

                if(renderFrameIn1To1Mode)
                    Ortho(0.0f, renderFrameSize.x * downscale, renderFrameSize.y * downscale, 0.0f, -100.0f, 100.0f);
                else
                    Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);

                if(scene)
                    scene->RenderSelections();
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, scaleSize.x, scaleSize.y);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, outputSize.x, outputSize.y);

        //------------------------------------

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        //------------------------------------
        // present/upload

        profileIn("video encoding and uploading");

        bool bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
                bEncode = false;
            else if(bFirstFrame)
            {
                firstFrameTime = qwTime;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        if(bEncode)
        {
            curStreamTime = qwTime-firstFrameTime;

            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirst420Encode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    x264_picture_t &prevPicOut = outPics[prevOutBuffer];
                    x264_picture_t &picOut = outPics[curOutBuffer];
                    x264_picture_t &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            outTimes[nextOutBuffer] = (DWORD)curStreamTime;

                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].pitch     = map.RowPitch;
                                convertInfo[i].output[0] = nextPicOut.img.plane[0];
                                convertInfo[i].output[1] = nextPicOut.img.plane[1];
                                convertInfo[i].output[2] = nextPicOut.img.plane[2];
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirst420Encode)
                                bFirst420Encode = bEncode = false;
                        }
                        else
                        {
                            outTimes[curOutBuffer] = (DWORD)curStreamTime;
                            Convert444to420((LPBYTE)map.pData, outputCX, map.RowPitch, outputCY, 0, outputCY, picOut.img.plane, SSE2Available());
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }
                    else
                    {
                        outTimes[curOutBuffer] = (DWORD)curStreamTime;

                        picOut.img.i_stride[0] = map.RowPitch;
                        picOut.img.plane[0]    = (uint8_t*)map.pData;
                    }

                    if(bEncode)
                    {
                        DWORD curFrameTimestamp = outTimes[prevOutBuffer];
                        //Log(TEXT("curFrameTimestamp: %u"), curFrameTimestamp);

                        //------------------------------------

                        FrameProcessInfo frameInfo;
                        frameInfo.firstFrameTime = firstFrameTime;
                        frameInfo.prevTexture = prevTexture;

                        if(bUseCFR)
                        {
                            while(cfrTime < curFrameTimestamp)
                            {
                                DWORD frameTimeAdjust = frameTime;
                                cfrTimeAdjust += fpsTimeNumerator;
                                if(cfrTimeAdjust > fpsTimeDenominator)
                                {
                                    cfrTimeAdjust -= fpsTimeDenominator;
                                    ++frameTimeAdjust;
                                }

                                DWORD halfTime = (frameTimeAdjust+1)/2;

                                x264_picture_t *nextPic = (curFrameTimestamp-cfrTime <= halfTime) ? &picOut : &prevPicOut;
                                //Log(TEXT("cfrTime: %u, time: %u"), cfrTime, curFrameTimestamp);

                                //these lines are just for counting duped frames
                                if(nextPic == lastPic)
                                    ++numTotalDuplicatedFrames;
                                else
                                    lastPic = nextPic;

                                frameInfo.picOut = nextPic;
                                frameInfo.picOut->i_pts = cfrTime;
                                frameInfo.frameTimestamp = cfrTime;
                                ProcessFrame(frameInfo);

                                cfrTime += frameTimeAdjust;

                                //Log(TEXT("cfrTime: %u, chi frame: %u"), cfrTime, (curFrameTimestamp-cfrTime <= halfTime));
                            }
                        }
                        else
                        {
                            picOut.i_pts = curFrameTimestamp;

                            frameInfo.picOut = &picOut;
                            frameInfo.frameTimestamp = curFrameTimestamp;

                            ProcessFrame(frameInfo);
                        }
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        profileOut;
        profileOut;

        //------------------------------------
        // get audio while sleeping or capturing
        //ReleaseSemaphore(hRequestAudioEvent, 1, NULL);

        //------------------------------------
        // frame sync

#ifdef USE_100NS_TIME
        QWORD renderStopTime = GetQPCTime100NS(clockFreq.QuadPart);
        sleepTargetTime += frameTime100ns;

        if(bWasLaggedFrame = (sleepTargetTime <= renderStopTime))
            numLongFrames++;
        else
            SleepTo(clockFreq.QuadPart, sleepTargetTime);
#else
        DWORD renderStopTime = OSGetTime();
        DWORD totalTime = renderStopTime-renderStartTime;

        if(totalTime > frameTimeAdjust)
            numLongFrames++;
        else if(totalTime < frameTimeAdjust)
            OSSleep(frameTimeAdjust-totalTime);
#endif

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirst420Encode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        for(int i=0; i<NUM_OUT_BUFFERS; i++)
            x264_picture_clean(&outPics[i]);
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
    if(bUseCFR)
        Log(TEXT("Total duplicated CFR frames: %d"), numTotalDuplicatedFrames);
}
Code Example #15
/// <summary>
/// Main processing function
/// </summary>
void CCoordinateMappingBasics::Update()
{
    if (!m_pMultiSourceFrameReader)
    {
        return;
    }

    IMultiSourceFrame* pMultiSourceFrame = NULL;
    IDepthFrame* pDepthFrame = NULL;
    IColorFrame* pColorFrame = NULL;
    IBodyIndexFrame* pBodyIndexFrame = NULL;

    IBodyFrame* pBodyFrame = NULL;

    HRESULT hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);

    if (SUCCEEDED(hr))
    {
        IDepthFrameReference* pDepthFrameReference = NULL;

        hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
        }

        SafeRelease(pDepthFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IColorFrameReference* pColorFrameReference = NULL;

        hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pColorFrameReference->AcquireFrame(&pColorFrame);
        }

        SafeRelease(pColorFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyIndexFrameReference* pBodyIndexFrameReference = NULL;

        hr = pMultiSourceFrame->get_BodyIndexFrameReference(&pBodyIndexFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
        }

        SafeRelease(pBodyIndexFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyFrameReference* pBodyFrameReference = NULL;

        hr = pMultiSourceFrame->get_BodyFrameReference(&pBodyFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyFrameReference->AcquireFrame(&pBodyFrame);
        }

		SafeRelease(pBodyFrameReference);
    }

    if (SUCCEEDED(hr))
    {
		// Depth
        INT64 nDepthTime = 0;
        IFrameDescription* pDepthFrameDescription = NULL;
        int nDepthWidth = 0;
        int nDepthHeight = 0;
        UINT nDepthBufferSize = 0;
        UINT16 *pDepthBuffer = NULL;

		// Color
        IFrameDescription* pColorFrameDescription = NULL;
        int nColorWidth = 0;
        int nColorHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nColorBufferSize = 0;
        RGBQUAD *pColorBuffer = NULL;

		// BodyIndex
        IFrameDescription* pBodyIndexFrameDescription = NULL;
        int nBodyIndexWidth = 0;
        int nBodyIndexHeight = 0;
        UINT nBodyIndexBufferSize = 0;
        BYTE *pBodyIndexBuffer = NULL;

		// Body
		IBody* ppBodies[BODY_COUNT] = { 0 };

        // get depth frame data
        hr = pDepthFrame->get_RelativeTime(&nDepthTime);

		// Depth
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Width(&nDepthWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Height(&nDepthHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);            
        }

        // get color frame data
        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Width(&nColorWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Height(&nColorHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }

        if (SUCCEEDED(hr))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
            }
            else if (m_pColorRGBX)
            {
                pColorBuffer = m_pColorRGBX;
                nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
            }
            else
            {
                hr = E_FAIL;
            }
        }

        // get body index frame data
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);            
        }

		// get body frame data

		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}

        if (SUCCEEDED(hr))
        {
            ProcessFrame(nDepthTime, pDepthBuffer, nDepthWidth, nDepthHeight, 
                pColorBuffer, nColorWidth, nColorHeight,
				pBodyIndexBuffer, nBodyIndexWidth, nBodyIndexHeight, BODY_COUNT, ppBodies);
        }

        SafeRelease(pDepthFrameDescription);
        SafeRelease(pColorFrameDescription);
        SafeRelease(pBodyIndexFrameDescription);

		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]);
		}
	}

    SafeRelease(pDepthFrame);
    SafeRelease(pColorFrame);
    SafeRelease(pBodyIndexFrame);
    SafeRelease(pBodyFrame);
    SafeRelease(pMultiSourceFrame);
}
Code Example #16
File: mainstate.c  Project: tghaefli/EBV
Msg const *MainState_top(MainState *me, Msg *msg)
{
        struct APPLICATION_STATE *pState;
        switch (msg->evt) {
        case START_EVT:
                /* initialize the whole stuff - is this the right place ? */
                STATE_START(me, &me->showGray);
                data.ipc.state.enAppMode = APP_CAPTURE_ON;
                data.pCurRawImg = data.u8FrameBuffers[0];
                data.nExposureTimeChanged = true;
                data.nResetProcessing = false;
                data.AddBufSize = 0;
                data.ipc.state.nExposureTime = 25;
                data.ipc.state.nStepCounter = 0;
                data.ipc.state.nThreshold = 0;
                return 0;
        case IPC_GET_APP_STATE_EVT:
                /* Fill in the response and schedule an acknowledge for the request. */
                pState = (struct APPLICATION_STATE*)data.ipc.req.pAddr;
                memcpy(pState, &data.ipc.state, sizeof(struct APPLICATION_STATE));

                data.ipc.enReqState = REQ_STATE_ACK_PENDING;
                return 0;
        case FRAMESEQ_EVT:
                /* Timestamp the capture of the image. */
                data.ipc.state.imageTimeStamp = OscSupCycGet();
                data.ipc.state.bNewImageReady = TRUE;
                /* Sleep here for a short while in order not to violate the vertical
                 * blank time of the camera sensor when triggering a new image
                 * right after receiving the old one. This can be removed if some
                 * heavy calculations are done here. */
                //usleep(4000);
                return 0;
        case FRAMEPAR_EVT: {
                /* we have a new image increase counter: here and only here! */
                data.ipc.state.nStepCounter++;
                /* debayer the image first -> to half size*/
#if NUM_COLORS == 1
                OscVisDebayerGreyscaleHalfSize(data.pCurRawImg, OSC_CAM_MAX_IMAGE_WIDTH, OSC_CAM_MAX_IMAGE_HEIGHT, ROW_YUYV, data.u8TempImage[SENSORIMG]);
#else
                memcpy(data.u8TempImage[SENSORIMG], data.pCurRawImg, NUM_COLORS*OSC_CAM_MAX_IMAGE_HEIGHT*OSC_CAM_MAX_IMAGE_WIDTH);
#endif
                /* Process the image. */
                //set data buffer to zero before each step
                data.AddBufSize = 0;
                ProcessFrame();

                return 0;
        }
        case IPC_SET_IMAGE_TYPE_EVT: {
                if(data.ipc.state.nImageType == SENSORIMG) {
                        STATE_TRAN(me, &me->showGray);
                } else if(data.ipc.state.nImageType == THRESHOLD) {
                        STATE_TRAN(me, &me->showThreshold);
                } else if(data.ipc.state.nImageType == BACKGROUND) {
                        STATE_TRAN(me, &me->showBackground);
                } else {
                        data.ipc.enReqState = REQ_STATE_NACK_PENDING;
                }
                data.ipc.enReqState = REQ_STATE_ACK_PENDING;
                return 0;
        }
        case IPC_GET_NEW_IMG_EVT:
                /* If the IPC event is not handled in the actual substate, a negative acknowledge is returned by default. */
                data.ipc.enReqState = REQ_STATE_NACK_PENDING;
                return 0;
        }
        return msg;
}
Code Example #17
File: OBSVideoCapture.cpp  Project: kusl/OBS
void OBS::EncodeLoop()
{
    QWORD streamTimeStart = GetQPCTimeNS();
    QWORD frameTimeNS = 1000000000/fps;
    bool bufferedFrames = true; //to avoid constantly polling number of frames
    int numTotalDuplicatedFrames = 0, numTotalFrames = 0, numFramesSkipped = 0;

    bufferedTimes.Clear();

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;

    QWORD sleepTargetTime = streamTimeStart+frameTimeNS;
    latestVideoTime = firstSceneTimestamp = streamTimeStart/1000000;
    latestVideoTimeNS = streamTimeStart;

    firstFrameTimestamp = 0;

    UINT encoderInfo = 0;
    QWORD messageTime = 0;

    EncoderPicture *lastPic = NULL;

    UINT skipThreshold = encoderSkipThreshold*2;
    UINT no_sleep_counter = 0;

    CircularList<QWORD> bufferedTimes;

    while(!bShutdownEncodeThread || (bufferedFrames && !bTestStream)) {
        if (!SleepToNS(sleepTargetTime += (frameTimeNS/2)))
            no_sleep_counter++;
        else
            no_sleep_counter = 0;

        latestVideoTime = sleepTargetTime/1000000;
        latestVideoTimeNS = sleepTargetTime;

        if (no_sleep_counter < skipThreshold) {
            SetEvent(hVideoEvent);
            if (encoderInfo) {
                if (messageTime == 0) {
                    messageTime = latestVideoTime+3000;
                } else if (latestVideoTime >= messageTime) {
                    RemoveStreamInfo(encoderInfo);
                    encoderInfo = 0;
                    messageTime = 0;
                }
            }
        } else {
            numFramesSkipped++;
            if (!encoderInfo)
                encoderInfo = AddStreamInfo(Str("EncoderLag"), StreamInfoPriority_Critical);
            messageTime = 0;
        }

        if (!SleepToNS(sleepTargetTime += (frameTimeNS/2)))
            no_sleep_counter++;
        else
            no_sleep_counter = 0;
        bufferedTimes << latestVideoTime;

        if (curFramePic && firstFrameTimestamp) {
            while (bufferedTimes[0] < firstFrameTimestamp)
                bufferedTimes.Remove(0);

            DWORD curFrameTimestamp = DWORD(bufferedTimes[0] - firstFrameTimestamp);
            bufferedTimes.Remove(0);

            profileIn("encoder thread frame");

            FrameProcessInfo frameInfo;
            frameInfo.firstFrameTime = firstFrameTimestamp;
            frameInfo.frameTimestamp = curFrameTimestamp;
            frameInfo.pic = curFramePic;

            if (lastPic == frameInfo.pic)
                numTotalDuplicatedFrames++;

            if(bUsingQSV)
                curFramePic->mfxOut->Data.TimeStamp = curFrameTimestamp;
            else
                curFramePic->picOut->i_pts = curFrameTimestamp;

            ProcessFrame(frameInfo);

            if (bShutdownEncodeThread)
                bufferedFrames = videoEncoder->HasBufferedFrames();

            lastPic = frameInfo.pic;

            profileOut;

            numTotalFrames++;
        }
    }

    //flush all video frames in the "scene buffering time" buffer
    if (firstFrameTimestamp && bufferedVideo.Num())
    {
        QWORD startTime = GetQPCTimeMS();
        DWORD baseTimestamp = bufferedVideo[0].timestamp;

        for(UINT i=0; i<bufferedVideo.Num(); i++)
        {
            //we measure our own time rather than sleep between frames due to potential sleep drift
            QWORD curTime;
            do
            {
                curTime = GetQPCTimeMS();
                OSSleep (1);
            } while (curTime - startTime < bufferedVideo[i].timestamp - baseTimestamp);

            SendFrame(bufferedVideo[i], firstFrameTimestamp);
            bufferedVideo[i].Clear();

            numTotalFrames++;
        }

        bufferedVideo.Clear();
    }

    Log(TEXT("Total frames encoded: %d, total frames duplicated: %d (%0.2f%%)"), numTotalFrames, numTotalDuplicatedFrames, (numTotalFrames > 0) ? (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0 : 0.0f);
    if (numFramesSkipped)
        Log(TEXT("Number of frames skipped due to encoder lag: %d (%0.2f%%)"), numFramesSkipped, (numTotalFrames > 0) ? (double(numFramesSkipped)/double(numTotalFrames))*100.0 : 0.0f);

    SetEvent(hVideoEvent);
    bShutdownVideoThread = true;
}
Code Example #18
int main(int argc, char* argv[]) {
    long frameToLearn = 16;
    cvNamedWindow("result", 1);
    cvNamedWindow("bg", 1);
#if 1
    CvBGCodeBookModel* model = cvCreateBGCodeBookModel();

    //Set color thresholds to default values
    model->modMin[0] = 3;
    model->modMin[1] = model->modMin[2] = 3;
    model->modMax[0] = 10;
    model->modMax[1] = model->modMax[2] = 10;
    model->cbBounds[0] = model->cbBounds[1] = model->cbBounds[2] = 10;

    CvCapture* capture = cvCreateFileCapture(argv[1]);
    if(3 == argc) {
        frameToLearn = atol(argv[2]);
    }
    IplImage *rawImage = 0, *yuvImage = 0; //yuvImage is for codebook method
    IplImage *imaskCodeBook = 0;
    long frameSeq = 0;

    while(true) {
      rawImage = cvQueryFrame(capture);
      if(!rawImage) {
        std::cout << "null frame" << std::endl;
        break;
      }

      ++frameSeq;
      std::cout << frameSeq << std::endl;

      char fnStr[32];
      snprintf(fnStr, sizeof(fnStr), "images/%08ld.jpg", frameSeq);
      cv::Mat mImg = rawImage;

      char fmStr[64];
      blobs.clear();

      if(1 == frameSeq) {
        yuvImage = cvCloneImage(rawImage);
        imaskCodeBook = cvCreateImage( cvGetSize(rawImage), IPL_DEPTH_8U, 1 );
        cvSet(imaskCodeBook,cvScalar(255));
      }

      cvCvtColor( rawImage, yuvImage, CV_BGR2YCrCb );//YUV For codebook method
      if(frameSeq < frameToLearn) {  // learning
        cvBGCodeBookUpdate(model, yuvImage);

        snprintf(fmStr, sizeof(fmStr), "%ld : %d", frameSeq, blobs.size());
        cv::putText(mImg, fmStr, cv::Point(10, 40), cv::FONT_HERSHEY_SIMPLEX, 1, CV_RGB(255, 0, 0));

        cv::Mat mImg = rawImage;
        cv::imwrite(fnStr, mImg);
      } else {
        if(frameSeq == frameToLearn)
          cvBGCodeBookClearStale( model, model->t/2 );

        cvBGCodeBookDiff( model, yuvImage, imaskCodeBook );

        ProcessFrame(imaskCodeBook, rawImage, 20, blob_collector);
        for(std::vector<CvRect>::const_iterator cr = blobs.begin(); cr != blobs.end(); ++cr) {
            std::cout << cr->x << ", " << cr->y << ", " << cr->width << ", " << cr->height << std::endl;
            cv::rectangle(mImg, *cr, CV_RGB(0,255,255));
        }

        snprintf(fmStr, sizeof(fmStr), "%ld : %d", frameSeq, blobs.size());
        cv::putText(mImg, fmStr, cv::Point(10, 40), cv::FONT_HERSHEY_SIMPLEX, 1, CV_RGB(255, 0, 0));

        cv::Mat m = imaskCodeBook;
        cv::imshow("bg", m);
        cv::imshow("result", mImg);
        cv::waitKey(0);
        //cv::imwrite(fnStr, mImg);
      }
    }
    std::cout << "done" << std::endl;

    cvReleaseImage(&imaskCodeBook);
    cvReleaseImage(&rawImage);
    cvReleaseImage(&yuvImage);
    cvReleaseCapture(&capture);
#else
    cv::Mat m;
    m = cv::imread(argv[1]);
    cv::imshow("result", m);
    cv::waitKey(0);
#endif
}
Code Example #19
PVideoFrame __stdcall DEdgeMask2::GetFrame(int n, IScriptEnvironment* env)
{
	PVideoFrame	dst = env->NewVideoFrame(vi);
	ProcessFrame(dst, child->GetFrame(n, env), child2->GetFrame(n, env), child3->GetFrame(n, env), false, env);
	return dst;
}
Code Example #20
long long PrePostRollFlagger::findBreakInrange(long long startFrame,
                                               long long stopFrame,
                                               long long totalFrames,
                                               long long &framesProcessed,
                                             QTime &flagTime, bool findLast)
{
    float flagFPS;
    int requiredBuffer = 30;
    long long currentFrameNumber;
    int prevpercent = -1;

    if(startFrame > 0)
        startFrame--;
    else
        startFrame = 0;

    player->DiscardVideoFrame(player->GetRawVideoFrame(0));

    long long tmpStartFrame = startFrame;
    VideoFrame* f = player->GetRawVideoFrame(tmpStartFrame);
    float aspect = player->GetVideoAspect();
    currentFrameNumber = f->frameNumber;
    LOG(VB_COMMFLAG, LOG_INFO, QString("Starting with frame %1")
            .arg(currentFrameNumber));
    player->DiscardVideoFrame(f);

    long long foundFrame = 0;

    while (player->GetEof() == kEofStateNone)
    {
        struct timeval startTime;
        if (stillRecording)
            gettimeofday(&startTime, NULL);

        VideoFrame* currentFrame = player->GetRawVideoFrame();
        currentFrameNumber = currentFrame->frameNumber;

        if(currentFrameNumber % 1000 == 0)
        {
            LOG(VB_COMMFLAG, LOG_INFO, QString("Processing frame %1")
                    .arg(currentFrameNumber));
        }

        if(currentFrameNumber > stopFrame || (!findLast && foundFrame))
        {
            player->DiscardVideoFrame(currentFrame);
            break;
        }

        double newAspect = currentFrame->aspect;
        if (newAspect != aspect)
        {
            SetVideoParams(aspect);
            aspect = newAspect;
        }

        if (((currentFrameNumber % 500) == 0) ||
            (((currentFrameNumber % 100) == 0) &&
             (stillRecording)))
        {
            emit breathe();
            if (m_bStop)
            {
                player->DiscardVideoFrame(currentFrame);
                return false;
            }
        }

        while (m_bPaused)
        {
            emit breathe();
            sleep(1);
        }

        // sleep a little so we don't use all cpu even if we're niced
        if (!fullSpeed && !stillRecording)
            usleep(10000);

        if (((currentFrameNumber % 500) == 0) ||
            ((showProgress || stillRecording) &&
             ((currentFrameNumber % 100) == 0)))
        {
            float elapsed = flagTime.elapsed() / 1000.0;

            if (elapsed)
                flagFPS = framesProcessed / elapsed;
            else
                flagFPS = 0.0;

            int percentage;
            if (stopFrame)
                percentage = framesProcessed * 100 / totalFrames;
            else
                percentage = 0;

            if (percentage > 100)
                percentage = 100;

            if (showProgress)
            {
                if (stopFrame)
                {
                    QString tmp = QString("\b\b\b\b\b\b\b\b\b\b\b%1%/%2fps")
                        .arg(percentage, 3).arg((int)flagFPS, 3);
                    QByteArray ba = tmp.toAscii();
                    cerr << ba.constData() << flush;
                }
                else
                {
                    QString tmp = QString("\b\b\b\b\b\b\b\b\b\b\b\b\b%1/%2fps")
                        .arg(currentFrameNumber, 6).arg((int)flagFPS, 3);
                    QByteArray ba = tmp.toAscii();
                    cerr << ba.constData() << flush;
                }
                cerr.flush();
            }

            if (stopFrame)
                emit statusUpdate(QObject::tr("%1% Completed @ %2 fps.")
                                  .arg(percentage).arg(flagFPS));
            else
                emit statusUpdate(QObject::tr("%1 Frames Completed @ %2 fps.")
                                  .arg((long)currentFrameNumber).arg(flagFPS));

            if (percentage % 10 == 0 && prevpercent != percentage)
            {
                prevpercent = percentage;
                LOG(VB_GENERAL, LOG_INFO, QString("%1%% Completed @ %2 fps.")
                    .arg(percentage) .arg(flagFPS));
            }
        }

        ProcessFrame(currentFrame, currentFrameNumber);

        if(frameInfo[currentFrameNumber].flagMask &
           (COMM_FRAME_SCENE_CHANGE | COMM_FRAME_BLANK))
        {
            foundFrame = currentFrameNumber;
        }

        if (stillRecording)
        {
            int secondsRecorded =
                recordingStartedAt.secsTo(MythDate::current());
            int secondsFlagged = (int)(framesProcessed / fps);
            int secondsBehind = secondsRecorded - secondsFlagged;
            long usecPerFrame = (long)(1.0 / player->GetFrameRate() * 1000000);

            struct timeval endTime;
            gettimeofday(&endTime, NULL);

            long long usecSleep =
                      usecPerFrame -
                      (((endTime.tv_sec - startTime.tv_sec) * 1000000) +
                       (endTime.tv_usec - startTime.tv_usec));

            if (secondsBehind > requiredBuffer)
            {
                if (fullSpeed)
                    usecSleep = 0;
                else
                    usecSleep = (long)(usecSleep * 0.25);
            }
            else if (secondsBehind < requiredBuffer)
                usecSleep = (long)(usecPerFrame * 1.5);

            if (usecSleep > 0)
                usleep(usecSleep);
        }

        player->DiscardVideoFrame(currentFrame);
        framesProcessed++;
    }
    return foundFrame;
}
Code Example #21
File: ApplicationImpl.cpp  Project: zenkovich/o2
	LRESULT WndProcFunc::WndProc(HWND wnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
	{
		auto app = Application::InstancePtr();

		POINT pt;
		RECT rt;
		int key = 0;
		Vec2I size, pos;
		GetCursorPos(&pt);
		ScreenToClient(wnd, &pt);
		Vec2F cursorPos = Vec2F((float)pt.x, (float)-pt.y);

		if (app->mRender)
			cursorPos -= Vec2F(Math::Round(app->mRender->mResolution.x*0.5f),
							   Math::Round(-app->mRender->mResolution.y*0.5f));

		float wheelDelta;

		if (app->IsReady())
		{
			switch (uMsg)
			{
			case WM_LBUTTONDOWN:
			SetCapture(app->mHWnd);
			app->mInput->OnCursorPressed(cursorPos);
			break;

			case WM_LBUTTONUP:
			app->mInput->OnCursorReleased();
			ReleaseCapture();
			break;

			case WM_RBUTTONDOWN:
			SetCapture(app->mHWnd);
			app->mInput->OnAltCursorPressed(cursorPos);
			break;

			case WM_RBUTTONUP:
			app->mInput->OnAltCursorReleased();
			ReleaseCapture();
			break;

			case WM_MBUTTONDOWN:
			SetCapture(app->mHWnd);
			app->mInput->OnAlt2CursorPressed(cursorPos);
			break;

			case WM_MBUTTONUP:
			app->mInput->OnAlt2CursorReleased();
			ReleaseCapture();
			break;

			case WM_KEYDOWN:
			key = (int)wParam;
			app->mInput->OnKeyPressed(key);
			break;

			case WM_KEYUP:
			app->mInput->OnKeyReleased((int)wParam);
			break;

			case WM_MOUSEMOVE:
			app->mInput->OnCursorMoved(cursorPos, 0);
			app->mInput->GetCursor()->delta -= app->mCursorCorrectionDelta;
			app->mCursorCorrectionDelta = Vec2F();
			break;

			case WM_MOUSEWHEEL:
			wheelDelta = GET_WHEEL_DELTA_WPARAM(wParam);
			app->mInput->OnMouseWheel(wheelDelta);
			break;

			case WM_ACTIVATEAPP:
			case WM_ENABLE:
			if (wParam == TRUE)
			{
				app->mActive = true;
				app->OnActivated();
				app->onActivated.Invoke();
				o2Events.OnApplicationActivated();
			}
			else
			{
				app->mActive = false;
				app->OnDeactivated();
				app->onDeactivated.Invoke();
				o2Events.OnApplicationDeactivated();
			}
			break;

			case WM_SIZE:
			GetWindowRect(app->mHWnd, &rt);
			size.x = rt.right - rt.left; size.y = rt.bottom - rt.top;

			if (size.x > 0 && size.y > 0 && size != app->mWindowedSize)
			{
				app->mWindowedSize = size;
				app->mRender->OnFrameResized();
				app->onResizing.Invoke();
				app->OnResizing();
				o2Events.OnApplicationSized();
			}
			app->ProcessFrame();

			break;

			case WM_MOVE:
			GetWindowRect(app->mHWnd, &rt);
			pos.x = rt.left; pos.y = rt.top;

			if (pos.x < 10000 && pos.y < 10000 && pos != app->mWindowedPos)
			{
				app->mWindowedPos = pos;
				app->OnMoved();
				app->onMoving.Invoke();
			}
			break;

			case WM_DESTROY:
			PostQuitMessage(0);
			return 0;
			break;
			}
		}

		return DefWindowProc(wnd, uMsg, wParam, lParam);
	}
Code Example #22
File: main.cpp  Project: yodergj/HandDetection
int main(int argc, char *argv[])
{
#if 0
  QCoreApplication a(argc, argv);
  return a.exec();
#endif
  VideoDecoder* videoDecoder = new VideoDecoder;
  VideoEncoder* videoEncoder = 0;
  AdaboostClassifier* openClassifier = new AdaboostClassifier;
  AdaboostClassifier* closedClassifier = new AdaboostClassifier;
  HandyTracker tracker;

  if ( argc != 5 )
  {
    printf("Usage: %s <open classifier> <closed classifier> <input video> <output video>\n", argv[0]);
    return 0;
  }

  if ( !openClassifier->Load(argv[1]) )
  {
    fprintf(stderr, "Failed loading open classifier\n", argv[1]);
    return 1;
  }

  if ( !tracker.SetOpenClassifier(openClassifier) )
  {
    fprintf(stderr, "Failed setting open classifier\n");
    return 1;
  }

  if ( !closedClassifier->Load(argv[2]) )
  {
    fprintf(stderr, "Failed loading closed classifier\n", argv[2]);
    return 1;
  }

  if ( !tracker.SetClosedClassifier(closedClassifier) )
  {
    fprintf(stderr, "Failed setting closed classifier\n");
    return 1;
  }

  videoDecoder->SetFilename(argv[3]);
  if ( !videoDecoder->Load() )
  {
    fprintf(stderr, "Failed loading video <%s>\n", argv[3]);
    return 1;
  }

  if ( !videoDecoder->UpdateFrame() )
  {
    fprintf(stderr, "Failed updating frame\n");
    return 1;
  }

  int frameNumber = 0;
  bool trackingInitialized = false;
  Image* img = videoDecoder->GetFrame();
  while ( img )
  {
    if ( !videoEncoder )
    {
      videoEncoder = new VideoEncoder;
      if ( !videoEncoder->Open(argv[4], img->GetWidth(), img->GetHeight(), 25) )
      {
        fprintf(stderr, "Failed opening output video <%s>\n", argv[4]);
        return 1;
      }
    }

    ProcessFrame(img, &tracker, trackingInitialized, frameNumber);
    if ( trackingInitialized )
      DrawResults(img, &tracker, frameNumber);

    videoEncoder->AddFrame(img);

    if ( frameNumber > 1 )
      tracker.PurgeRegion(frameNumber - 2);
    frameNumber++;

    videoDecoder->UpdateFrame();
    img = videoDecoder->GetFrame();
  }
  videoEncoder->Close();

  return 0;
}
コード例 #23
0
ファイル: OBSVideoCapture.cpp プロジェクト: kerhong/OBS
void OBS::EncodeLoop()
{
    QWORD streamTimeStart = GetQPCTimeNS();
    QWORD frameTimeNS = 1000000000/fps;
    bool bufferedFrames = true; //to avoid constantly polling number of frames
    int numTotalDuplicatedFrames = 0, numTotalFrames = 0;

    bufferedTimes.Clear();

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;

    QWORD sleepTargetTime = streamTimeStart+frameTimeNS;
    latestVideoTime = firstSceneTimestamp = streamTimeStart/1000000;
    latestVideoTimeNS = streamTimeStart;

    firstFrameTimestamp = 0;

    EncoderPicture *lastPic = NULL;

    CircularList<QWORD> bufferedTimes;

    while(!bShutdownEncodeThread || (bufferedFrames && !bTestStream)) {
        SetEvent(hVideoEvent);
        SleepToNS(sleepTargetTime);
        latestVideoTime = sleepTargetTime/1000000;
        latestVideoTimeNS = sleepTargetTime;

        bufferedTimes << latestVideoTime;

        if (curFramePic && firstFrameTimestamp) {
            while (bufferedTimes[0] < firstFrameTimestamp)
                bufferedTimes.Remove(0);

            DWORD curFrameTimestamp = DWORD(bufferedTimes[0] - firstFrameTimestamp);
            bufferedTimes.Remove(0);

            profileIn("encoder thread frame");

            FrameProcessInfo frameInfo;
            frameInfo.firstFrameTime = firstFrameTimestamp;
            frameInfo.frameTimestamp = curFrameTimestamp;
            frameInfo.pic = curFramePic;

            if (lastPic == frameInfo.pic)
                numTotalDuplicatedFrames++;

            if(bUsingQSV)
                curFramePic->mfxOut->Data.TimeStamp = curFrameTimestamp;
            else
                curFramePic->picOut->i_pts = curFrameTimestamp;

            ProcessFrame(frameInfo);

            if (bShutdownEncodeThread)
                bufferedFrames = videoEncoder->HasBufferedFrames();

            lastPic = frameInfo.pic;

            profileOut;

            numTotalFrames++;
        }

        sleepTargetTime += frameTimeNS;
    }

    //flush all video frames in the "scene buffering time" buffer
    if (firstFrameTimestamp && bufferedVideo.Num())
    {
        QWORD startTime = GetQPCTimeMS();
        DWORD baseTimestamp = bufferedVideo[0].timestamp;

        for(UINT i=0; i<bufferedVideo.Num(); i++)
        {
            //we measure our own time rather than sleep between frames due to potential sleep drift
            QWORD curTime;
            do
            {
                curTime = GetQPCTimeMS();
                OSSleep (1);
            } while (curTime - startTime < bufferedVideo[i].timestamp - baseTimestamp);

            SendFrame(bufferedVideo[i], firstFrameTimestamp);
            bufferedVideo[i].Clear();

            numTotalFrames++;
        }

        bufferedVideo.Clear();
    }

    Log(TEXT("Total frames encoded: %d, total frames duplicated %d (%0.2f%%)"), numTotalFrames, numTotalDuplicatedFrames, (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0);

    SetEvent(hVideoEvent);
    bShutdownVideoThread = true;
}
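
The core of EncodeLoop is a drift-free fixed-timestep pattern: the wake-up target is advanced by exactly one frame period each iteration and the thread sleeps to that absolute target, so per-iteration sleep jitter does not accumulate. A stripped-down sketch of the pattern, assuming GetQPCTimeNS() and SleepToNS() behave like the OBS timing helpers used above (monotonic nanosecond clock, sleep until an absolute time):

// Sketch only: the fixed-timestep pacing used by EncodeLoop above.
// GetQPCTimeNS()/SleepToNS() are assumed to be the same OBS helpers.
static void FixedStepLoop(unsigned fps, bool (*shouldStop)(), void (*tick)())
{
    const QWORD frameTimeNS = 1000000000ULL / fps;
    QWORD target = GetQPCTimeNS() + frameTimeNS;

    while (!shouldStop())
    {
        SleepToNS(target);      // sleep to an absolute time, not for a delta
        tick();                 // do one frame's worth of encoding work
        target += frameTimeNS;  // advance the target; jitter does not accumulate
    }
}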
コード例 #24
0
ファイル: OBSVideoCapture.cpp プロジェクト: Crehl/OBS
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    if(bUsingQSV)
        bUsing444 = false;

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];
    DWORD outTimes[NUM_OUT_BUFFERS] = {0, 0, 0};

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //----------------------------------------
    // time/timestamp stuff

    bufferedTimes.Clear();
    ctsOffsets.Clear();
    int bufferedFrames = 1; //to avoid constantly polling number of frames

#ifdef USE_100NS_TIME
    QWORD streamTimeStart = GetQPCTime100NS();
    QWORD frameTime100ns = 10000000/fps;

    QWORD sleepTargetTime = 0;
    bool bWasLaggedFrame = false;
#else
    DWORD streamTimeStart = OSGetTime();
    DWORD fpsTimeAdjust = 0;
#endif
    totalStreamTime = 0;

    lastAudioTimestamp = 0;

    latestVideoTime = firstSceneTimestamp = GetQPCTimeMS();

    DWORD fpsTimeNumerator = 1000-(frameTime*fps);
    DWORD fpsTimeDenominator = fps;
    DWORD cfrTime = 0;
    DWORD cfrTimeAdjust = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    int numTotalDuplicatedFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
#ifdef USE_100NS_TIME
    double bpsTime = 0.0;
#else
    float bpsTime = 0.0f;
#endif
    double lastStrain = 0.0f;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirst420Encode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD curStreamTime = 0, lastStreamTime, firstFrameTime = GetQPCTimeMS();

#ifdef USE_100NS_TIME
    lastStreamTime = GetQPCTime100NS()-frameTime100ns;
#else
    lastStreamTime = firstFrameTime-frameTime;
#endif

    //bool bFirstAudioPacket = true;

    List<ProfilerNode> threadedProfilers;
    bool bUsingThreadedProfilers = false;

    while(bRunning || bufferedFrames)
    {
#ifdef USE_100NS_TIME
        QWORD renderStartTime = GetQPCTime100NS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/10000);

        if(sleepTargetTime == 0 || bWasLaggedFrame)
            sleepTargetTime = renderStartTime;
#else
        DWORD renderStartTime = OSGetTime();
        totalStreamTime = renderStartTime-streamTimeStart;

        DWORD frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;
        }
#endif

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        profileIn("frame");

#ifdef USE_100NS_TIME
        QWORD qwTime = renderStartTime/10000;
        latestVideoTime = qwTime;

        QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.0000001;

        //Log(TEXT("frameDelta: %f"), fSeconds);

        lastStreamTime = renderStartTime;
#else
        QWORD qwTime = GetQPCTimeMS();
        latestVideoTime = qwTime;

        QWORD frameDelta = qwTime-lastStreamTime;
        float fSeconds = float(frameDelta)*0.001f;

        //Log(TEXT("frameDelta: %llu"), frameDelta);

        lastStreamTime = qwTime;
#endif

        bool bUpdateBPS = false;
        profileIn("frame preprocessing and rendering");

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);

            LoadVertexShader(mainVertexShader);
            LoadPixelShader(mainPixelShader);

            Ortho(0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y, 0.0f, -100.0f, 100.0f);
            if(renderFrameCtrlSize.x != oldRenderFrameCtrlWidth || renderFrameCtrlSize.y != oldRenderFrameCtrlHeight)
            {
                // User is drag resizing the window. We don't recreate the swap chains so our coordinates are wrong
                SetViewport(0.0f, 0.0f, (float)oldRenderFrameCtrlWidth, (float)oldRenderFrameCtrlHeight);
            }
            else
                SetViewport(0.0f, 0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y);

            // Draw background (Black if fullscreen, window colour otherwise)
            if(bFullscreenMode)
                ClearColorBuffer(0x000000);
            else
                ClearColorBuffer(GetSysColor(COLOR_BTNFACE));

            if(bTransitioning)
            {
                BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
                DrawSprite(transitionTexture, 0xFFFFFFFF,
                        renderFrameOffset.x, renderFrameOffset.y,
                        renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);
                BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
            }

            DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF,
                    renderFrameOffset.x, renderFrameOffset.y,
                    renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene) {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        profileOut;

        //------------------------------------
        // present/upload

        profileIn("video encoding and uploading");

        bool bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (qwTime-firstFrameTime > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical); 
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTime = qwTime;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        if(bEncode)
        {
            curStreamTime = qwTime-firstFrameTime;

            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirst420Encode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            outTimes[nextOutBuffer] = (DWORD)curStreamTime;

                            bool firstRun = threadedProfilers.Num() == 0;
                            if(firstRun)
                                threadedProfilers.SetSize(numThreads);

                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].inPitch   = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch  = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
                                }
                                if(!firstRun)
                                    threadedProfilers[i].~ProfilerNode();
                                ::new (&threadedProfilers[i]) ProfilerNode(TEXT("Convert444Threads"), true);
                                threadedProfilers[i].MonitorThread(h420Threads[i]);
                                bUsingThreadedProfilers = true;
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirst420Encode)
                                bFirst420Encode = bEncode = false;
                        }
                        else
                        {
                            outTimes[curOutBuffer] = (DWORD)curStreamTime;
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }
                    else
                    {
                        outTimes[curOutBuffer] = (DWORD)curStreamTime;

                        picOut.picOut->img.i_stride[0] = map.RowPitch;
                        picOut.picOut->img.plane[0]    = (uint8_t*)map.pData;
                    }

                    if(bEncode)
                    {
                        DWORD curFrameTimestamp = outTimes[prevOutBuffer];
                        //Log(TEXT("curFrameTimestamp: %u"), curFrameTimestamp);

                        //------------------------------------

                        FrameProcessInfo frameInfo;
                        frameInfo.firstFrameTime = firstFrameTime;
                        frameInfo.prevTexture = prevTexture;

                        if(bDupeFrames)
                        {
                            while(cfrTime < curFrameTimestamp)
                            {
                                DWORD frameTimeAdjust = frameTime;
                                cfrTimeAdjust += fpsTimeNumerator;
                                if(cfrTimeAdjust > fpsTimeDenominator)
                                {
                                    cfrTimeAdjust -= fpsTimeDenominator;
                                    ++frameTimeAdjust;
                                }

                                DWORD halfTime = (frameTimeAdjust+1)/2;

                                EncoderPicture &nextPic = (curFrameTimestamp-cfrTime <= halfTime) ? picOut : prevPicOut;
                                //Log(TEXT("cfrTime: %u, time: %u"), cfrTime, curFrameTimestamp);

                                //these lines are just for counting duped frames
                                if(nextPic == lastPic)
                                    ++numTotalDuplicatedFrames;
                                else
                                    lastPic = nextPic;

                                frameInfo.pic = &nextPic;
                                if(bUsingQSV)
                                    frameInfo.pic->mfxOut->Data.TimeStamp = cfrTime;
                                else
                                    frameInfo.pic->picOut->i_pts = cfrTime;
                                frameInfo.frameTimestamp = cfrTime;
                                ProcessFrame(frameInfo);

                                cfrTime += frameTimeAdjust;

                                //Log(TEXT("cfrTime: %u, chi frame: %u"), cfrTime, (curFrameTimestamp-cfrTime <= halfTime));
                            }
                        }
                        else
                        {
                            if(bUsingQSV)
                                picOut.mfxOut->Data.TimeStamp = curFrameTimestamp;
                            else
                                picOut.picOut->i_pts = curFrameTimestamp;

                            frameInfo.pic = &picOut;
                            frameInfo.frameTimestamp = curFrameTimestamp;

                            ProcessFrame(frameInfo);
                        }

                        if (!bRunning)
                            bufferedFrames = videoEncoder->GetBufferedFrames ();
                    }

                    if(bUsing444)
                    {
                        prevTexture->Unmap(0);
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream)
            {
                if (curStrain > 25)
                {
                    if (qwTime - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo (FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = qwTime;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (qwTime - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        bUpdateBPS = true;

                        lastAdjustmentTime = qwTime;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;

        profileOut; //video encoding and uploading
        profileOut; //frame

        //------------------------------------
        // frame sync

#ifdef USE_100NS_TIME
        QWORD renderStopTime = GetQPCTime100NS();
        sleepTargetTime += frameTime100ns;

        if(bWasLaggedFrame = (sleepTargetTime <= renderStopTime))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }
        else
            SleepTo(sleepTargetTime);
#else
        DWORD renderStopTime = OSGetTime();
        DWORD totalTime = renderStopTime-renderStartTime;

        if(totalTime > frameTimeAdjust)
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }
        else if(totalTime < frameTimeAdjust)
            OSSleep(frameTimeAdjust-totalTime);
#endif

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);
                    if(bUsingThreadedProfilers)
                        threadedProfilers[i].~ProfilerNode();

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirst420Encode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
    if(bDupeFrames)
        Log(TEXT("Total duplicated frames: %d (%0.2f%%)"), numTotalDuplicatedFrames, (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0);
}
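
One piece of arithmetic in MainCaptureLoop that is easy to miss is the fractional frame-time accumulator: with fps = 30 the integer frame time truncates to 33 ms, so the leftover 1000 - 33*30 = 10 ms per second are re-added one millisecond at a time through fpsTimeNumerator/fpsTimeDenominator. A self-contained, hedged illustration of just that accumulator:

// Sketch only: the fractional frame-time accumulator from the loop above.
// For fps = 30, frameTime truncates to 33 ms and the accumulator stretches
// roughly every third frame to 34 ms, so a second of frames stays close to 1000 ms.
#include <cstdio>

int main()
{
    const unsigned fps = 30;
    const unsigned frameTime = 1000 / fps;                     // 33 ms, truncated
    const unsigned fpsTimeNumerator = 1000 - frameTime * fps;  // 10 ms left over per second
    const unsigned fpsTimeDenominator = fps;

    unsigned fpsTimeAdjust = 0, total = 0;
    for (unsigned i = 0; i < fps; i++)
    {
        unsigned frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if (fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;                                  // stretch this frame by 1 ms
        }
        total += frameTimeAdjust;
    }
    printf("%u frames span %u ms\n", fps, total);               // close to 1000 ms
    return 0;
}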
コード例 #25
0
ファイル: mainstate.c プロジェクト: tpluess/ebv
Msg const *MainState_top(MainState *me, Msg *msg)
{
	struct APPLICATION_STATE *pState;
	switch (msg->evt)
	{
	case START_EVT:
		/* initialize the whole stuff - is this the right place ? */
		STATE_START(me, &me->showgray);
		data.ipc.state.enAppMode = APP_CAPTURE_ON;
		data.pCurRawImg = data.u8FrameBuffers[0];
		data.nExposureTimeChanged = true;
		data.ipc.state.nExposureTime = 25;
		data.ipc.state.nStepCounter = 0;
		data.ipc.state.nThreshold = 30;
		return 0;
	case IPC_GET_APP_STATE_EVT:
		/* Fill in the response and schedule an acknowledge for the request. */
		pState = (struct APPLICATION_STATE*)data.ipc.req.pAddr;
		memcpy(pState, &data.ipc.state, sizeof(struct APPLICATION_STATE));

		data.ipc.enReqState = REQ_STATE_ACK_PENDING;
		return 0;
	case FRAMESEQ_EVT:
		/* Timestamp the capture of the image. */
		data.ipc.state.imageTimeStamp = OscSupCycGet();
		data.ipc.state.bNewImageReady = TRUE;
		/* Sleep here for a short while in order not to violate the vertical
		 * blank time of the camera sensor when triggering a new image
		 * right after receiving the old one. This can be removed if some
		 * heavy calculations are done here. */
		usleep(4000);
		return 0;
	case FRAMEPAR_EVT:
	{
		/* we have a new image increase counter: here and only here! */
		data.ipc.state.nStepCounter++;
		/* debayer the image first -> to half size*/
		OscVisDebayerGreyscaleHalfSize( data.pCurRawImg, OSC_CAM_MAX_IMAGE_WIDTH, OSC_CAM_MAX_IMAGE_HEIGHT, ROW_BGBG, data.u8TempImage[GRAYSCALE]);
		/* Process the image. */
		/* the parameter is not really required */
		ProcessFrame(data.u8TempImage[GRAYSCALE]);

		return 0;
	}
	case IPC_SET_IMAGE_TYPE_EVT:
	{
		if(data.ipc.state.nImageType == GRAYSCALE) {
			STATE_TRAN(me, &me->showgray);
		}
		else if(data.ipc.state.nImageType == THRESHOLD) {
			STATE_TRAN(me, &me->showthresh);
		}
		else if(data.ipc.state.nImageType == EROSION) {
			STATE_TRAN(me, &me->showerosion);
		}
		else if(data.ipc.state.nImageType == DILATION) {
			STATE_TRAN(me, &me->showdilation);
		}
		else if(data.ipc.state.nImageType == LABELIMG) {
			STATE_TRAN(me, &me->showlabel);
		}
		else {
			/* Unknown image type requested: negatively acknowledge and return
			 * so the NACK is not immediately overwritten by the ACK below. */
			data.ipc.enReqState = REQ_STATE_NACK_PENDING;
			return 0;
		}
		data.ipc.enReqState = REQ_STATE_ACK_PENDING;
		return 0;
	}
	case IPC_GET_NEW_IMG_EVT:
		/* If the IPC event is not handled in the actual substate, a negative acknowledge is returned by default. */
		data.ipc.enReqState = REQ_STATE_NACK_PENDING;
	return 0;
	}
	return msg;
}
コード例 #26
0
ファイル: test_recorder.cpp プロジェクト: telnykha/VideoA
int _tmain(int argc, _TCHAR* argv[])
{
	/*

	Initial parameter setup. This code only
	needs to be run once.

	TVAInitParams params;
	memcpy(&params.Camera, &g_camera, sizeof(TVACamera));
	params.NumZones = 0;
	params.EventSens = 0.5;
	params.EventTimeSens = 1000;
	SaveInitParams("params.xml", &params);
	*/
	// initialize the observation zones
	for (int i = 0; i < C_MAX_OBJECTS; i++)
	{
		g_contours[i].IsRect = false;
		g_contours[i].NumPoints = C_MAX_POINTS;
		g_contours[i].Points = (TVAPoint*)malloc(C_MAX_POINTS*sizeof(TVAPoint));
	}
	
	cvInitFont(&g_font, CV_FONT_HERSHEY_PLAIN,1, 1);

	CvCapture* capture = NULL;
	if (argc < 2)
		capture = cvCaptureFromCAM(0);
	else
		capture = cvCaptureFromFile(argv[1]);
	

	if (capture == NULL)
	{
		printf("%s\n", "Cannot open camera.");
		return -1;
	}

    double fps = cvGetCaptureProperty ( // get the frame rate
        capture,
        CV_CAP_PROP_FPS
    );

    CvSize size = cvSize( // get the frame size
       (int)cvGetCaptureProperty( capture, CV_CAP_PROP_FRAME_WIDTH),
       (int)cvGetCaptureProperty( capture, CV_CAP_PROP_FRAME_HEIGHT)
    );
	g_mask = cvCreateImage(size, IPL_DEPTH_8U, 1);
	CvVideoWriter* writer = NULL;
	cvNamedWindow(_MODULE_);
	cvSetMouseCallback(_MODULE_, on_mouse);

	/*
		Frame capture and processing loop.
	*/
	for (;;) 
	{
		IplImage* frame = NULL;
		frame = cvQueryFrame(capture);
		if (!frame)
			break;
		/*
		 draw the rectangles
		*/
		for (int i = 0; i < g_rects_count; i++)
		{
			CvPoint p1 = cvPoint(g_rects[i].x, g_rects[i].y);
			CvPoint p2 = cvPoint(p1.x + g_rects[i].width, p1.y + g_rects[i].height);
			
			cvRectangle(frame, p1, p2, CV_RGB(255,0,0));
		}

		/*
			Draw the observation zones.
		*/
		for (int i = 0; i < g_contours_count; i++)
		{
			if (g_contours[i].NumPoints > 0)
			{
				for (int j = 1; j < g_contours[i].NumPoints; j++)
				{
					CvPoint p1 = cvPoint((int)g_contours[i].Points[j-1].X, (int)g_contours[i].Points[j-1].Y);
					CvPoint p2 = cvPoint((int)g_contours[i].Points[j].X, (int)g_contours[i].Points[j].Y);
					cvLine(frame, p1,p2, CV_RGB(255,0,0));
				}
				CvPoint p1 = cvPoint((int)g_contours[i].Points[g_contours[i].NumPoints-1].X, (int)g_contours[i].Points[g_contours[i].NumPoints-1].Y);
				CvPoint p2 = cvPoint((int)g_contours[i].Points[0].X, (int)g_contours[i].Points[0].Y);
				cvLine(frame, p1,p2, CV_RGB(255,0,0));			
			}
		}

		/*
			Process the frame and display the resulting image in the window.
		*/
		ProcessFrame(frame);
		if (g_grid_visible)
			DrawGrid(frame);
		DrawStatus(frame);
		cvShowImage(_MODULE_, frame);
		/*
			Write the frame to the output video
		*/
		if (g_record_video)
			cvWriteFrame( writer, frame );

		/*
			Handle keyboard input
		*/
		bool state = g_set_rects || g_set_contours || g_set_zones;
		int c;
		c = cvWaitKey(10);
		if ((char)c == 27)
			break;

		if ((char)c == 's')
		{
			cvSaveImage("out.png", frame);
		}
		else if ((char)c == 'l')
		{
			if (!state)
				LoadRects(size.width, size.height);
		}
		else if ((char)c == 'g')
		{
			if (!state)
				LoadContours(size.width, size.height);
		}
		else if ((char)c == 'k')
		{
			if (!state)
				LoadZones(size.width, size.height);
		}
		else if ((char)c == 'r')
   		{
			if (g_record_video)
			{
				// stop writing to disk
				cvReleaseVideoWriter( &writer );
				writer = NULL;
				g_record_video = false;
				printf("Stop recording.\n");
			}
			else
			{
				// open the output file and bind the
				// writer variable to it
				writer = cvCreateVideoWriter("out.avi",CV_FOURCC('D','I','V','X'), fps, size );

				if (writer == NULL)
				{
					printf("%s\n", "Cannot create writer.");
				}
				else
				{
					g_record_video = true;
					printf("Start recording.\n");

				}
			}
		}
		else if ((char)c == 't')
		{
			if (g_set_rects)
			{
				SaveRects(size.width, size.height);
				if (!g_set_zones)
				{
					g_rects_count = 0;
					ClearMask(g_mask);
				}
				g_set_rects = false;
			}
			else if (!g_set_contours)
			{
				g_set_rects = true;
			}
		}
		else if ((char)c == 'c')
		{
			
			if (g_set_contours)
			{
				SaveContours(size.width, size.height);		
				if (!g_set_zones)
				{
					g_contours_count = 0;
					ClearMask(g_mask);
				}
				g_set_contours = false;
				g_open_contour = false;
			}
			else if (!g_set_rects)
			{
				g_set_contours = true;
			}
		}
		else if ((char)c == 'z')
		{
			if (g_set_zones)
			{
				SaveZones(size.width, size.height);
				g_set_zones = false;
				g_contours_count = 0;
				g_rects_count = 0;
				ClearMask(g_mask);
			}
			else if (!g_set_rects && !g_set_contours)
			{
				g_set_zones = true;
			}
		}
		else if ((char)c == 'w')
		{
			g_grid_visible = !g_grid_visible;
		}
	}

	cvReleaseVideoWriter( &writer );
	cvDestroyWindow(_MODULE_);
	cvReleaseCapture(&capture);
	cvReleaseImage(&g_mask);
	// free allocated memory
	for (int i = 0; i < C_MAX_OBJECTS; i++)
	{
		free(g_contours[i].Points);
	}

	return 0;
}
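
For reference, the 'r' key handler above amounts to a small start/stop toggle around a CvVideoWriter. A hedged sketch that isolates it, using only the legacy OpenCV 1.x C API calls already present in the example (fps and size are assumed to come from the capture properties, and the same headers are assumed to be included):

// Sketch only: the start/stop recording toggle from the 'r' key handler above,
// isolated as a helper. Returns the new recording state.
static bool ToggleRecording(CvVideoWriter** writer, double fps, CvSize size)
{
	if (*writer)
	{
		cvReleaseVideoWriter(writer);   // flush and close; also sets *writer to NULL
		printf("Stop recording.\n");
		return false;
	}
	*writer = cvCreateVideoWriter("out.avi", CV_FOURCC('D','I','V','X'), fps, size);
	if (*writer == NULL)
	{
		printf("Cannot create writer.\n");
		return false;
	}
	printf("Start recording.\n");
	return true;
}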