Example No. 1
ezTestAppRun ezRendererTestBasics::SubtestClearScreen()
{
  BeginFrame();

  switch (m_iFrame)
  {
    case 0:
      ClearScreen(ezColor(1, 0, 0));
      break;
    case 1:
      ClearScreen(ezColor(0, 1, 0));
      break;
    case 2:
      ClearScreen(ezColor(0, 0, 1));
      break;
    case 3:
      ClearScreen(ezColor(0.5f, 0.5f, 0.5f, 0.5f));
      break;
  }

  EZ_TEST_IMAGE(m_iFrame, 1);

  EndFrame();

  return m_iFrame < 3 ? ezTestAppRun::Continue : ezTestAppRun::Quit;
}
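A note on the pattern: nearly every excerpt on this page brackets per-frame work between BeginFrame() and EndFrame(), and the pairing must hold on every code path, including early returns. A minimal sketch of the convention, using a hypothetical Renderer type that belongs to none of the projects below:

// Hypothetical Renderer, used only to illustrate the Begin/End pairing contract.
struct Renderer {
    void BeginFrame();   // open the frame: acquire per-frame resources
    void DrawScene();    // any amount of drawing in between
    void EndFrame();     // close the frame: submit/present, release resources
};

void RenderOneFrame(Renderer& r)
{
    r.BeginFrame();
    r.DrawScene();
    r.EndFrame();        // pairs with the BeginFrame() above on every path
}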
Example No. 2
	bool D3D11RenderDevice::Refresh()
	{
		BeginFrame();
		Render();
		EndFrame();
		return false;
	}
Example No. 3
File: ANIM.CPP Project: CGSG/TLR
/* Timer event handle function.
 * ARGUMENTS: None.
 * RETURNS: None.
 */
VOID tlr::anim::Timer( VOID )
{
  /* Handle timer */
  timer::Response();
  static CHAR Buf[100];
  sprintf(Buf, "fps: %.5f", FPS);
  SetWindowText(win::hWnd, Buf);

  /* Handle input system */
  input::Response();

  /* Handle animation objects */
  for (INT i = 0; i < Units.Size(); i++)
    Units[i]->Response(this);

  StartFrame();
  for (INT i = 0; i < Units.Size(); i++)
  {
    glPushAttrib(GL_ALL_ATTRIB_BITS);
    Units[i]->Render(this);
    glPopAttrib();
  }
  EndFrame();
  CopyFrame();
  IncrFrameCount();
} /* End of 'tlr::anim::Timer' function */
Example No. 4
void BasicEngine::Run()
{
	Setup();
	while (running)
	{
		frameBegin = timerProvider->GetTimeElapsed();
		timeElapsed = frameBegin - lastTime;

		++frameCounter;		
		timeCount += timeElapsed;		
		if (timeCount > 1.0)
		{
			currentFPS = frameCounter;
			frameCounter = 0;
			timeCount = 0.0;
		}
		t.dTimeElapsed = frameBegin;
		t.dTimeSinceLastFrame = timeElapsed;
		t.fTimeElapsed = (float)frameBegin;
		t.fTimeSinceLastFrame = (float)timeElapsed;
		Update(t);		
		Display();
		EndFrame();
		if (LimitFPS)
		{
			timerProvider->Sleep(0.016 - timerProvider->GetTimeElapsed() + frameBegin);
		}
		lastTime = frameBegin;
		GUI.Update();
	}
}
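One caveat in the limiter above: 0.016 - timerProvider->GetTimeElapsed() + frameBegin goes negative whenever a frame takes longer than 16 ms, so Sleep() can receive a negative duration. A hedged sketch of a clamped variant (hypothetical helper, not part of BasicEngine):

#include <algorithm>

// Hypothetical helper: clamp the frame-limiter sleep so a slow frame
// never requests a negative sleep duration (~60 FPS target by default).
double FrameLimiterSleepSeconds(double frameBegin, double now,
                                double targetSeconds = 1.0 / 60.0)
{
    double spent = now - frameBegin;              // time consumed by Update/Display
    return std::max(0.0, targetSeconds - spent);  // 0.0 means "skip the sleep"
}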
Example No. 5
void Profiler::BeginFrame()
{
    // End the previous frame if any
    EndFrame();
    
    BeginBlock("RunFrame");
}
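Calling EndFrame() first makes Profiler::BeginFrame() safe even when the previous frame was never explicitly closed. A minimal sketch of the same guard with an explicit flag, assuming (not quoting) the profiler's internals:

// Sketch of a re-entrancy guard for frame bracketing (assumed internals).
class FrameScope {
public:
    void BeginFrame() {
        if (m_inFrame)      // close a frame the caller left open
            EndFrame();
        m_inFrame = true;   // ... begin per-frame bookkeeping here ...
    }
    void EndFrame() {
        m_inFrame = false;  // ... flush per-frame bookkeeping here ...
    }
private:
    bool m_inFrame = false;
};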
Example No. 6
static void Draw()
{
	BeginFrame();

	DrawAxis();

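	// block-scope forward declaration of ProcessCommands(); the call below resolves to it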
	void ProcessCommands();
	ProcessCommands();

	EndFrame();
}
Example No. 7
		void SCSapplication::Start () {
			m_iRunning = SCS_TRUE;
			Init();
			Load();
			while (m_iRunning) {
				BeginFrame();
				if (m_iUseTargetFPS) {
					if (m_kFrameTimer.TimeExpired()) {
						m_kFrameTimer.Restart();
						Frame();
						EndFrame();
					}
					continue;
				}
				Frame();
				EndFrame();
			}
			Unload();
			Fullscreen(SCS_FALSE);
			SDL_Quit();
		}
Example No. 8
/* Redraw window content function.
 * ARGUMENTS:
 *   - window device context:
 *       HDC hDC;
 * RETURNS: None.
 */
VOID physic::anim::Paint( HDC hDC )
{
  StartFrame();
  glPushAttrib(GL_ALL_ATTRIB_BITS);
  Navigation.Render(this);
  glPopAttrib();
  World.ModelUpdate(this);
  World.Render(this);
  EndFrame();
  CopyFrame();
  IncrFrameCount();
} /* End of 'physic::anim::Paint' function */
Example No. 9
HRESULT CDXVADecoderMpeg2::DecodeFrameInternal (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT					hr					= S_FALSE;
	int						nSurfaceIndex		= -1;
	CComPtr<IMediaSample>	pSampleToDeliver;
	int						nFieldType			= -1;
	int						nSliceType			= -1;
	bool					bIsField			= false;
	int						bFrame_repeat_pict	= 0;

	CHECK_HR_FALSE (FFMpeg2DecodeFrame (&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(),
					m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize, &bIsField, &bFrame_repeat_pict));

	// Wait for an I-frame after a flush
	if (m_bFlushed && (!m_PictureParams.bPicIntra || (bIsField && m_PictureParams.bSecondField))) {
		TRACE_MPEG2 ("CDXVADecoderMpeg2::DecodeFrame() : Flush - wait I frame, %ws\n", FrameType(bIsField, m_PictureParams.bSecondField));
		return S_FALSE;
	}

	CHECK_HR (GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));

	if (!bIsField || (bIsField && !m_PictureParams.bSecondField)) {
		UpdatePictureParams(nSurfaceIndex);	
	}

	TRACE_MPEG2 ("CDXVADecoderMpeg2::DecodeFrame() : Surf = %d, PictureType = %d, %ws, m_nNextCodecIndex = %d, rtStart = [%I64d]\n",
				 nSurfaceIndex, nSliceType, FrameType(bIsField, m_PictureParams.bSecondField), m_nNextCodecIndex, rtStart);

	{
		CHECK_HR (BeginFrame(nSurfaceIndex, pSampleToDeliver));
		// Send picture parameters
		CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));
		// Add quantization matrix
		CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));
		// Add slice control
		CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo));
		// Add bitstream
		CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));
		// Decode frame
		CHECK_HR (Execute());
		CHECK_HR (EndFrame(nSurfaceIndex));
	}

	bool bAdded = AddToStore (nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop,
							  bIsField, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));

	if (bAdded) {
		hr = DisplayNextFrame();
	}
		
	m_bFlushed = false;
	return hr;
}
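CHECK_HR, CHECK_HR_FALSE and CHECK_HR_FRAME are project-local macros that none of these excerpts define. A plausible minimal reading, assuming early-return-on-failure semantics (an assumption, not the projects' actual definitions):

// Assumed semantics only (HRESULT and FAILED come from <windows.h>):
// propagate the failing HRESULT, or downgrade any failure to S_FALSE.
#define CHECK_HR(expr)       { HRESULT _hr = (expr); if (FAILED(_hr)) return _hr;     }
#define CHECK_HR_FALSE(expr) { HRESULT _hr = (expr); if (FAILED(_hr)) return S_FALSE; }

CHECK_HR_FRAME presumably also closes the frame opened by BeginFrame() before returning, so a mid-frame failure does not leave the surface open; the excerpts do not show its definition.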
Example No. 10
HRESULT CDXVADecoderH264::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT	hr			= S_FALSE;
	int		got_picture	= 0;

	memset(&m_DXVA_Context.DXVA_H264Context, 0, sizeof(DXVA_H264_Context) * _countof(m_DXVA_Context.DXVA_H264Context));
	CHECK_HR_FALSE (FFDecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(),
								  pDataIn, nSize, rtStart,
								  &got_picture));

	if (m_nSurfaceIndex == -1 || !m_DXVA_Context.DXVA_H264Context[0].slice_count) {
		return S_FALSE;
	}

	for (UINT i = 0; i < _countof(m_DXVA_Context.DXVA_H264Context); i++) {
		DXVA_H264_Context* pDXVA_H264_ctx = &m_DXVA_Context.DXVA_H264Context[i];

		if (!pDXVA_H264_ctx->slice_count) {
			continue;
		}

		m_nFieldNum = i;

		pDXVA_H264_ctx->DXVAPicParams.Reserved16Bits				= Reserved16Bits;
		pDXVA_H264_ctx->DXVAPicParams.StatusReportFeedbackNumber	= StatusReportFeedbackNumber++;

		// Begin frame
		CHECK_HR_FALSE (BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));
		// Add picture parameters
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(DXVA_PicParams_H264), &pDXVA_H264_ctx->DXVAPicParams));
		// Add quantization matrix
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), &pDXVA_H264_ctx->DXVAScalingMatrix));
		// Add bitstream
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_BitStreamDateBufferType));
		// Add slice control
		if (m_bUseLongSlice) {
			CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Long) * pDXVA_H264_ctx->slice_count, pDXVA_H264_ctx->SliceLong));
		} else {
			CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short) * pDXVA_H264_ctx->slice_count, pDXVA_H264_ctx->SliceShort));
		}
		// Decode frame
		CHECK_HR_FRAME (Execute());
		CHECK_HR_FALSE (EndFrame(m_nSurfaceIndex));
	}

	if (got_picture) {
		AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, rtStart, rtStop);
		hr = DisplayNextFrame();
	}

	return hr;
}
Example No. 11
void CTriangle::Reset()
{
    m_iStepGen = m_iLinearCounter = 0;
    m_iEnabled = m_iControlReg = 0;
    m_iCounter = m_iLengthCounter = 0;

    Write(0, 0);
    Write(1, 0);
    Write(2, 0);
    Write(3, 0);

    EndFrame();
}
Example No. 12
void MySDLVU::Display()
{
  static GLUquadric * q = gluNewQuadric();
  BeginFrame();
  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  glPushMatrix();
  glColor3f(0.95,0.95,0.95);
  gluSphere(q, 0.5, 100, 100);
  glRotatef(70, 1, 0, 0);
  glColor3f(0.6,0.6,0.6);
  gluDisk(q, 0.7, 1, 100, 100);
  glPopMatrix();
  EndFrame();
}
Example No. 13
// === Public functions
HRESULT CDXVADecoderMpeg2::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT						hr;
	int							nSurfaceIndex;
	CComPtr<IMediaSample>		pSampleToDeliver;
	int							nFieldType;
	int							nSliceType;

	FFMpeg2DecodeFrame (&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(), 
						m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize);

	// Wait for an I-frame after a flush
	if (m_bFlushed && ! m_PictureParams.bPicIntra)
		return S_FALSE;

	hr = GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop);
	if (FAILED (hr))
	{
		ASSERT (hr == VFW_E_NOT_COMMITTED);		// Normal when stop playing
		return hr;
	}

	CHECK_HR (BeginFrame(nSurfaceIndex, pSampleToDeliver));

	UpdatePictureParams(nSurfaceIndex);

	TRACE_MPEG2 ("=> %s   %I64d  Surf=%d\n", GetFFMpegPictureType(nSliceType), rtStart, nSurfaceIndex);

	TRACE_MPEG2("CDXVADecoderMpeg2 : Decode frame %i\n", m_PictureParams.bPicScanMethod);

	CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

	CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));

	// Send bitstream to accelerator
	CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo));
	CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

	// Decode frame
	CHECK_HR (Execute());
	CHECK_HR (EndFrame(nSurfaceIndex));

	AddToStore (nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop, 
				false,(FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));
	m_bFlushed = false;

	return DisplayNextFrame();
}
Example No. 14
HRESULT CDXVADecoderMpeg2::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT	hr			= S_FALSE;
	bool	bIsField	= false;
	int		got_picture	= 0;

	memset(&m_DXVA_Context, 0, sizeof(DXVA_Context));
	CHECK_HR_FALSE (FFDecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(),
								  pDataIn, nSize, rtStart,
								  &got_picture));

	if (m_nSurfaceIndex == -1 || !m_DXVA_Context.DXVA_MPEG2Context[0].slice_count) {
		return S_FALSE;
	}

	m_pFilter->HandleKeyFrame(got_picture);

	for (UINT i = 0; i < m_DXVA_Context.frame_count; i++) {
		DXVA_MPEG2_Context* pDXVA_MPEG2_ctx = &m_DXVA_Context.DXVA_MPEG2Context[i];

		if (!pDXVA_MPEG2_ctx->slice_count) {
			continue;
		}

		m_nFieldNum = i;
		UpdatePictureParams();

		CHECK_HR_FALSE (BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));
		// Send picture parameters
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(DXVA_PictureParameters), &pDXVA_MPEG2_ctx->DXVAPicParams));
		// Add quantization matrix
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_QmatrixData), &pDXVA_MPEG2_ctx->DXVAScalingMatrix));
		// Add bitstream
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_BitStreamDateBufferType));
		// Add slice control
		CHECK_HR_FRAME (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_SliceInfo) * pDXVA_MPEG2_ctx->slice_count, pDXVA_MPEG2_ctx->slice));
		// Decode frame
		CHECK_HR_FRAME (Execute());
		CHECK_HR_FALSE (EndFrame(m_nSurfaceIndex));
	}

	if (got_picture) {
		AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, rtStart, rtStop);
		hr = DisplayNextFrame();
	}
		
	return hr;
}
Example No. 15
/*
===========
idVertexCache::Init
===========
*/
void idVertexCache::Init()
{
	cmdSystem->AddCommand("listVertexCache", R_ListVertexCache_f, CMD_FL_RENDERER, "lists vertex cache");

	if (r_vertexBufferMegs.GetInteger() < 8) {
		r_vertexBufferMegs.SetInteger(8);
	}

	virtualMemory = false;

	// use ARB_vertex_buffer_object unless explicitly disabled
	if (r_useVertexBuffers.GetInteger() && glConfig.ARBVertexBufferObjectAvailable) {
		common->Printf("using ARB_vertex_buffer_object memory\n");
	} else {
		virtualMemory = true;
		r_useIndexBuffers.SetBool(false);
		common->Printf("WARNING: vertex array range in virtual memory (SLOW)\n");
	}

	// initialize the cache memory blocks
	freeStaticHeaders.next = freeStaticHeaders.prev = &freeStaticHeaders;
	staticHeaders.next = staticHeaders.prev = &staticHeaders;
	freeDynamicHeaders.next = freeDynamicHeaders.prev = &freeDynamicHeaders;
	dynamicHeaders.next = dynamicHeaders.prev = &dynamicHeaders;
	deferredFreeList.next = deferredFreeList.prev = &deferredFreeList;

	// set up the dynamic frame memory
	frameBytes = FRAME_MEMORY_BYTES;
	staticAllocTotal = 0;

	byte	*junk = (byte *)Mem_Alloc(frameBytes);

	for (int i = 0 ; i < NUM_VERTEX_FRAMES ; i++) {
		allocatingTempBuffer = true;	// force the alloc to use GL_STREAM_DRAW_ARB
		Alloc(junk, frameBytes, &tempBuffers[i]);
		allocatingTempBuffer = false;
		tempBuffers[i]->tag = TAG_FIXED;
		// unlink these from the static list, so they won't ever get purged
		tempBuffers[i]->next->prev = tempBuffers[i]->prev;
		tempBuffers[i]->prev->next = tempBuffers[i]->next;
	}

	Mem_Free(junk);

	EndFrame();
}
Example No. 16
void CDPCM::Reset()
{
	m_iCounter = m_iPeriod = DMC_PERIODS_NTSC[0];

	m_iBitDivider = m_iShiftReg = 0;
	m_iDMA_LoadReg = 0;
	m_iDMA_LengthReg = 0;
	m_iDMA_Address = 0;
	m_iDMA_BytesRemaining = 0;
	
	m_bTriggeredIRQ = m_bSampleFilled = false;

	// loaded with 0 on power-up.
	m_iDeltaCounter = 0;

	EndFrame();
}
Example No. 17
void RAS_OpenGLRasterizer::Exit()
{
	m_storage->Exit();

	glEnable(GL_CULL_FACE);
	glEnable(GL_DEPTH_TEST);
	glClearDepth(1.0f);
	glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
	glClearColor(m_redback, m_greenback, m_blueback, m_alphaback);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glDepthMask (GL_TRUE);
	glDepthFunc(GL_LEQUAL);
	glBlendFunc(GL_ONE, GL_ZERO);
	
	glDisable(GL_POLYGON_STIPPLE);
	
	glDisable(GL_LIGHTING);
	if (GLEW_EXT_separate_specular_color || GLEW_VERSION_1_2)
		glLightModeli(GL_LIGHT_MODEL_COLOR_CONTROL, GL_SINGLE_COLOR);
	
	EndFrame();
}
Example No. 18
//----------------------------------------------------------------//
void MOAIProfilerContext::BeginFrame ( const bool profilingEnabled ) {
	
	if ( InFrame () ) {
		EndFrame ();
	}

	if ( mProfilingEnabled != profilingEnabled )
		mProfilingEnabled = profilingEnabled;

	if ( !mProfilingEnabled )
		return;

	// Select the next frame
	mCurrentFrameIdx = ( mCurrentFrameIdx + 1 ) % NUM_FRAMES;
	Frame& curFrame = mFrames [ mCurrentFrameIdx ];

	curFrame.mNumScopesEntered = 0;

	curFrame.mCurrentScope = &curFrame.mRootScope;
	curFrame.mCurrentScope->Enter ( 0, curFrame.mCurrentScope->mName );

	curFrame.mLastScope = curFrame.mCurrentScope;
}
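The mCurrentFrameIdx = ( mCurrentFrameIdx + 1 ) % NUM_FRAMES step walks a fixed ring: the profiler keeps the most recent NUM_FRAMES frames of scope data and overwrites the oldest slot each frame. A tiny sketch of the index arithmetic (the NUM_FRAMES value is an assumption):

// Assumption: NUM_FRAMES is a small compile-time constant, e.g. 2 for double buffering.
constexpr int NUM_FRAMES = 2;

int NextFrameIndex(int current)
{
    return (current + 1) % NUM_FRAMES;  // wraps back to slot 0 after the last slot
}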
Example No. 19
// Render the scene.
void D3D12Multithreading::OnRender()
{
	BeginFrame();

#if SINGLETHREADED
	for (int i = 0; i < NumContexts; i++)
	{
		WorkerThread(reinterpret_cast<LPVOID>(i));
	}
	MidFrame();
	EndFrame();
	m_commandQueue->ExecuteCommandLists(_countof(m_pCurrentFrameResource->m_batchSubmit), m_pCurrentFrameResource->m_batchSubmit);
#else
	for (int i = 0; i < NumContexts; i++)
	{
		SetEvent(m_workerBeginRenderFrame[i]); // Tell each worker to start drawing.
	}

	MidFrame();
	EndFrame();

	WaitForMultipleObjects(NumContexts, m_workerFinishShadowPass, TRUE, INFINITE);

	// You can execute command lists on any thread. Depending on the work 
	// load, apps can choose between using ExecuteCommandLists on one thread 
	// vs ExecuteCommandList from multiple threads.
	m_commandQueue->ExecuteCommandLists(NumContexts + 2, m_pCurrentFrameResource->m_batchSubmit); // Submit PRE, MID and shadows.

	WaitForMultipleObjects(NumContexts, m_workerFinishedRenderFrame, TRUE, INFINITE);

	// Submit remaining command lists.
	m_commandQueue->ExecuteCommandLists(_countof(m_pCurrentFrameResource->m_batchSubmit) - NumContexts - 2, m_pCurrentFrameResource->m_batchSubmit + NumContexts + 2);
#endif

	m_cpuTimer.Tick(NULL);
	if (m_titleCount == TitleThrottle)
	{
		WCHAR cpu[64];
		swprintf_s(cpu, L"%.4f CPU", m_cpuTime / m_titleCount);
		SetCustomWindowText(cpu);

		m_titleCount = 0;
		m_cpuTime = 0;
	}
	else
	{
		m_titleCount++;
		m_cpuTime += m_cpuTimer.GetElapsedSeconds() * 1000;
		m_cpuTimer.ResetElapsedTime();
	}

	// Present and update the frame index for the next frame.
	PIXBeginEvent(m_commandQueue.Get(), 0, L"Presenting to screen");
	ThrowIfFailed(m_swapChain->Present(0, 0));
	PIXEndEvent(m_commandQueue.Get());
	m_frameIndex = m_swapChain->GetCurrentBackBufferIndex();

	// Signal and increment the fence value.
	m_pCurrentFrameResource->m_fenceValue = m_fenceValue;
	ThrowIfFailed(m_commandQueue->Signal(m_fence.Get(), m_fenceValue));
	m_fenceValue++;
}
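The Signal-then-increment pair at the end is the usual D3D12 frame-fence idiom: each frame resource records the fence value that marks the end of its GPU work, and the CPU must wait for the fence to reach that value before reusing the resource. A hedged sketch of the matching wait (the helper and event handle are assumptions; ThrowIfFailed is the helper the sample itself uses):

#include <d3d12.h>

// Hypothetical helper: block the CPU until the GPU has passed 'value'.
// Assumes 'event' was created with CreateEvent and ThrowIfFailed is available.
void WaitForFenceValue(ID3D12Fence* fence, UINT64 value, HANDLE event)
{
    if (fence->GetCompletedValue() < value)             // GPU has not reached this frame yet
    {
        ThrowIfFailed(fence->SetEventOnCompletion(value, event));
        WaitForSingleObject(event, INFINITE);           // wake when the GPU signals the fence
    }
}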
Example No. 20
void KX_KetsjiEngine::Render()
{
	if(m_usedome){
		RenderDome();
		return;
	}
	KX_Scene* firstscene = *m_scenes.begin();
	const RAS_FrameSettings &framesettings = firstscene->GetFramingType();

	m_logger->StartLog(tc_rasterizer, m_kxsystem->GetTimeInSeconds(), true);
	SG_SetActiveStage(SG_STAGE_RENDER);

	// hide the mouse cursor each frame
	// (it comes back when the window loses focus and regains it)
	if (m_hideCursor)
		m_canvas->SetMouseState(RAS_ICanvas::MOUSE_INVISIBLE);

	// clear the entire game screen with the border color
	// only once per frame
	m_canvas->BeginDraw();
	if (m_drawingmode == RAS_IRasterizer::KX_TEXTURED) {
		m_canvas->SetViewPort(0, 0, m_canvas->GetWidth(), m_canvas->GetHeight());
		if (m_overrideFrameColor)
		{
			// Do not use the framing bar color set in the Blender scenes
			m_canvas->ClearColor(
				m_overrideFrameColorR,
				m_overrideFrameColorG,
				m_overrideFrameColorB,
				1.0
				);
		}
		else
		{
			// Use the framing bar color set in the Blender scenes
			m_canvas->ClearColor(
				framesettings.BarRed(),
				framesettings.BarGreen(),
				framesettings.BarBlue(),
				1.0
				);
		}
		// clear the -whole- viewport
		m_canvas->ClearBuffer(RAS_ICanvas::COLOR_BUFFER|RAS_ICanvas::DEPTH_BUFFER);
	}

	m_rasterizer->SetEye(RAS_IRasterizer::RAS_STEREO_LEFTEYE);

	// BeginFrame() sets the actual drawing area. You can use a part of the window
	if (!BeginFrame())
		return;

	KX_SceneList::iterator sceneit;
	for (sceneit = m_scenes.begin();sceneit != m_scenes.end(); sceneit++)
	// for each scene, call the proceed functions
	{
		KX_Scene* scene = *sceneit;
		KX_Camera* cam = scene->GetActiveCamera();
		// pass the scene's worldsettings to the rasterizer
		SetWorldSettings(scene->GetWorldInfo());

		// this is now done incrementally in KX_Scene::CalculateVisibleMeshes
		//scene->UpdateMeshTransformations();

		// shadow buffers
		RenderShadowBuffers(scene);

		// Avoid drawing the scene with the active camera twice when its viewport is enabled
		if(cam && !cam->GetViewport())
		{
			if (scene->IsClearingZBuffer())
				m_rasterizer->ClearDepthBuffer();
	
			m_rendertools->SetAuxilaryClientInfo(scene);
	
			// do the rendering
			RenderFrame(scene, cam);
		}
		
		list<class KX_Camera*>* cameras = scene->GetCameras();
		
		// Draw the scene once for each camera with an enabled viewport
		list<KX_Camera*>::iterator it = cameras->begin();
		while(it != cameras->end())
		{
			if((*it)->GetViewport())
			{
				if (scene->IsClearingZBuffer())
					m_rasterizer->ClearDepthBuffer();
		
				m_rendertools->SetAuxilaryClientInfo(scene);
		
				// do the rendering
				RenderFrame(scene, (*it));
			}
			
			it++;
		}
	}

	// only one place that checks for stereo
	if(m_rasterizer->Stereo())
	{
		m_rasterizer->SetEye(RAS_IRasterizer::RAS_STEREO_RIGHTEYE);

		if (!BeginFrame())
			return;


		for (sceneit = m_scenes.begin();sceneit != m_scenes.end(); sceneit++)
		// for each scene, call the proceed functions
		{
			KX_Scene* scene = *sceneit;
			KX_Camera* cam = scene->GetActiveCamera();

			// pass the scene's worldsettings to the rasterizer
			SetWorldSettings(scene->GetWorldInfo());
		
			if (scene->IsClearingZBuffer())
				m_rasterizer->ClearDepthBuffer();

			//pass the scene, for picking and raycasting (shadows)
			m_rendertools->SetAuxilaryClientInfo(scene);

			// do the rendering
			//RenderFrame(scene);
			RenderFrame(scene, cam);

			list<class KX_Camera*>* cameras = scene->GetCameras();			
	
			// Draw the scene once for each camera with an enabled viewport
			list<KX_Camera*>::iterator it = cameras->begin();
			while(it != cameras->end())
			{
				if((*it)->GetViewport())
				{
					if (scene->IsClearingZBuffer())
						m_rasterizer->ClearDepthBuffer();
			
					m_rendertools->SetAuxilaryClientInfo(scene);
			
					// do the rendering
					RenderFrame(scene, (*it));
				}
				
				it++;
			}
		}
	} // if(m_rasterizer->Stereo())

	EndFrame();
}
Example No. 21
void KX_KetsjiEngine::RenderDome()
{
	GLuint	viewport[4]={0};
	glGetIntegerv(GL_VIEWPORT,(GLint *)viewport);
	
	m_dome->SetViewPort(viewport);

	KX_Scene* firstscene = *m_scenes.begin();
	const RAS_FrameSettings &framesettings = firstscene->GetFramingType();

	m_logger->StartLog(tc_rasterizer, m_kxsystem->GetTimeInSeconds(), true);

	// hide the mouse cursor each frame
	// (it comes back when the window loses focus and regains it)
	if (m_hideCursor)
		m_canvas->SetMouseState(RAS_ICanvas::MOUSE_INVISIBLE);

	// clear the entire game screen with the border color
	// only once per frame

	m_canvas->BeginDraw();

	// BeginFrame() sets the actual drawing area. You can use a part of the window
	if (!BeginFrame())
		return;

	KX_SceneList::iterator sceneit;

	int n_renders=m_dome->GetNumberRenders();// usually 4 or 6
	for (int i=0;i<n_renders;i++){
		m_canvas->ClearBuffer(RAS_ICanvas::COLOR_BUFFER|RAS_ICanvas::DEPTH_BUFFER);
		for (sceneit = m_scenes.begin();sceneit != m_scenes.end(); sceneit++)
		// for each scene, call the proceed functions
		{
			KX_Scene* scene = *sceneit;
			KX_Camera* cam = scene->GetActiveCamera();

			m_rendertools->BeginFrame(m_rasterizer);
			// pass the scene's worldsettings to the rasterizer
			SetWorldSettings(scene->GetWorldInfo());

			// shadow buffers
			if (i == 0){
				RenderShadowBuffers(scene);
			}
			// Avoid drawing the scene with the active camera twice when its viewport is enabled
			if(cam && !cam->GetViewport())
			{
				if (scene->IsClearingZBuffer())
					m_rasterizer->ClearDepthBuffer();
		
				m_rendertools->SetAuxilaryClientInfo(scene);
		
				// do the rendering
				m_dome->RenderDomeFrame(scene,cam, i);
			}
			
			list<class KX_Camera*>* cameras = scene->GetCameras();
			
			// Draw the scene once for each camera with an enabled viewport
			list<KX_Camera*>::iterator it = cameras->begin();
			while(it != cameras->end())
			{
				if((*it)->GetViewport())
				{
					if (scene->IsClearingZBuffer())
						m_rasterizer->ClearDepthBuffer();
			
					m_rendertools->SetAuxilaryClientInfo(scene);
			
					// do the rendering
					m_dome->RenderDomeFrame(scene, (*it),i);
				}
				
				it++;
			}
		}
		m_dome->BindImages(i);
	}	

	m_canvas->EndFrame();//XXX do we really need that?

	m_canvas->SetViewPort(0, 0, m_canvas->GetWidth(), m_canvas->GetHeight());

	if (m_overrideFrameColor) //XXX why do we want
	{
		// Do not use the framing bar color set in the Blender scenes
		m_canvas->ClearColor(
			m_overrideFrameColorR,
			m_overrideFrameColorG,
			m_overrideFrameColorB,
			1.0
			);
	}
	else
	{
		// Use the framing bar color set in the Blender scenes
		m_canvas->ClearColor(
			framesettings.BarRed(),
			framesettings.BarGreen(),
			framesettings.BarBlue(),
			1.0
			);
	}

	m_dome->Draw();

	// run the 2dfilters and motion blur once for all the scenes
	PostRenderFrame();
	EndFrame();
}
Example No. 22
void eae6320::Graphics::Render()
{
	ClearFrame();
	StartFrame();

	// Drawing Opaque list
	s_boxes_obj->DrawObject();
	s_ceiling_obj->DrawObject();
	s_floor_obj->DrawObject();
	s_innerWalls_obj->DrawObject();
	s_metal_obj->DrawObject();
	s_outerWalls_obj->DrawObject();
	s_railing_obj->DrawObject();

	// Drawing Transparent list
	//


	// Drawing Debug Shapes
#ifdef _DEBUG
	s_debugLine1.DrawLine();
	s_debugLine2.DrawLine();
	s_snowmanLine->DrawLine();
	s_debugBox1.DrawBox();
	s_debugBox2.DrawBox();

	if (s_debugMenuCheckBox->m_isChecked)
	{
		s_debugSphere1->DrawSphere();
		//s_debugSphere2.DrawSphere();
	}

	Graphics::GetLocalDirect3dDevice()->SetRenderState(D3DRS_FILLMODE, D3DFILL_WIREFRAME);
	s_debugCylinder1->DrawObject();
	s_debugCylinder2->DrawObject();
	Graphics::GetLocalDirect3dDevice()->SetRenderState(D3DRS_FILLMODE, D3DFILL_SOLID);
#endif

	// Drawing Game Sprites
	//s_logo.Draw();
	s_numbers->Draw();

	// Drawing third person snowman
	s_snowman->DrawObject();
	s_snowmanClient->DrawObject();

	// Draw Capture the Flag Stuff
	s_flag1->DrawObject();
	s_flag2->DrawObject();
	if(s_bullet1->m_isActive)
		s_bullet1->DrawObject();
	if (s_bullet2->m_isActive)
		s_bullet2->DrawObject();
	s_sprintBar->DrawDebugBar(150);
	s_snowmanScore->DrawDebugText(100);
	if(s_clientJoined)
		s_snowmanClientScore->DrawDebugText(100);

	// Drawing Debug Menu Items
#ifdef _DEBUG
	if (s_debugMenuEnabled)
	{
		float fpsCount = 1 / Time::GetSecondsElapsedThisFrame();
		s_debugMenuTextFPS.SetFPS(fpsCount);

		switch (s_activeMenuItem)
		{
		case DebugMenuSelection::Text:
			s_debugMenuTextFPS.DrawDebugText(255);
			s_debugMenuCheckBox->DrawDebugCheckBox(0);
			s_debugMenuSlider->DrawDebugSlider(0);
			s_debugMenuButton->DrawDebugButton(0);
			s_toggleFPSCheckBox->DrawDebugCheckBox(0);
			break;

		case DebugMenuSelection::CheckBox:
			s_debugMenuTextFPS.DrawDebugText(0);
			s_debugMenuCheckBox->DrawDebugCheckBox(255);
			s_debugMenuSlider->DrawDebugSlider(0);
			s_debugMenuButton->DrawDebugButton(0);
			s_toggleFPSCheckBox->DrawDebugCheckBox(0);
			break;

		case DebugMenuSelection::Slider:
			s_debugMenuTextFPS.DrawDebugText(0);
			s_debugMenuCheckBox->DrawDebugCheckBox(0);
			s_debugMenuSlider->DrawDebugSlider(255);
			s_debugMenuButton->DrawDebugButton(0);
			s_toggleFPSCheckBox->DrawDebugCheckBox(0);
			break;

		case DebugMenuSelection::Button:
			s_debugMenuTextFPS.DrawDebugText(0);
			s_debugMenuCheckBox->DrawDebugCheckBox(0);
			s_debugMenuSlider->DrawDebugSlider(0);
			s_debugMenuButton->DrawDebugButton(255);
			s_toggleFPSCheckBox->DrawDebugCheckBox(0);
			break;

		case DebugMenuSelection::ToggleCam:
			s_debugMenuTextFPS.DrawDebugText(0);
			s_debugMenuCheckBox->DrawDebugCheckBox(0);
			s_debugMenuSlider->DrawDebugSlider(0);
			s_debugMenuButton->DrawDebugButton(0);
			s_toggleFPSCheckBox->DrawDebugCheckBox(255);
			break;

		default:
			break;
		}
	}
#endif

	EndFrame();
	ShowFrame();
}
Example No. 23
HRESULT CDXVADecoderH264::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr = S_FALSE;
    UINT nSlices = 0;
    int nSurfaceIndex = -1;
    int nFieldType = -1;
    int nSliceType = -1;
    int nFramePOC = INT_MIN;
    int nOutPOC = INT_MIN;
    REFERENCE_TIME rtOutStart = _I64_MIN;
    CH264Nalu Nalu;
    UINT nNalOffset = 0;
    CComPtr<IMediaSample> pSampleToDeliver;
    CComQIPtr<IMPCDXVA2Sample> pDXVA2Sample;
    int slice_step = 1;

    if (FFH264DecodeBuffer(m_pFilter->GetAVCtx(), pDataIn, nSize, &nFramePOC, &nOutPOC, &rtOutStart) == -1) {
        return S_FALSE;
    }

    while (!nSlices && slice_step <= 2) {
        Nalu.SetBuffer(pDataIn, nSize, slice_step == 1 ? m_nNALLength : 0);
        while (Nalu.ReadNext()) {
            switch (Nalu.GetType()) {
                case NALU_TYPE_SLICE:
                case NALU_TYPE_IDR:
                    if (m_bUseLongSlice) {
                        m_pSliceLong[nSlices].BSNALunitDataLocation = nNalOffset;
                        m_pSliceLong[nSlices].SliceBytesInBuffer = (UINT)Nalu.GetDataLength() + 3; //.GetRoundedDataLength();
                        m_pSliceLong[nSlices].slice_id = nSlices;
                        FF264UpdateRefFrameSliceLong(&m_DXVAPicParams, &m_pSliceLong[nSlices], m_pFilter->GetAVCtx());

                        if (nSlices > 0) {
                            m_pSliceLong[nSlices - 1].NumMbsForSlice = m_pSliceLong[nSlices].NumMbsForSlice = m_pSliceLong[nSlices].first_mb_in_slice - m_pSliceLong[nSlices - 1].first_mb_in_slice;
                        }
                    }
                    nSlices++;
                    nNalOffset += (UINT)(Nalu.GetDataLength() + 3);
                    if (nSlices > MAX_SLICES) {
                        break;
                    }
                    break;
            }
        }
        slice_step++;
    }

    if (!nSlices) {
        return S_FALSE;
    }

    m_nMaxWaiting = min(max(m_DXVAPicParams.num_ref_frames, 3), 8);

    // If parsing fails (probably no PPS/SPS), continue anyway; the headers may arrive later (happens on truncated streams)
    if (FAILED(FFH264BuildPicParams(&m_DXVAPicParams, &m_DXVAScalingMatrix, &nFieldType, &nSliceType, m_pFilter->GetAVCtx(), m_pFilter->GetPCIVendor()))) {
        return S_FALSE;
    }

    TRACE_H264("CDXVADecoderH264::DecodeFrame() : nFramePOC = %11d, nOutPOC = %11d[%11d], [%d - %d], rtOutStart = [%20I64d]\n", nFramePOC, nOutPOC, m_nOutPOC, m_DXVAPicParams.field_pic_flag, m_DXVAPicParams.RefPicFlag, rtOutStart);

    // Wait for an I-frame after a flush
    if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag) {
        TRACE_H264("CDXVADecoderH264::DecodeFrame() : Flush - wait I frame\n");
        m_nBrokenFramesFlag = 0;
        m_nBrokenFramesFlag_POC = 0;
        m_nfield_pic_flag = m_DXVAPicParams.field_pic_flag;
        m_nRefPicFlag = m_DXVAPicParams.RefPicFlag;
        m_nPrevOutPOC = INT_MIN;
        return S_FALSE;
    }

    /* Disabled, because that causes serious problems.
        // Some magic code for detecting the incorrect decoding of interlaced frames ...
        // TODO : necessary to make it better, and preferably on the side of ffmpeg ...
        if (m_nfield_pic_flag && m_nfield_pic_flag == m_DXVAPicParams.field_pic_flag && m_nRefPicFlag == m_DXVAPicParams.RefPicFlag) {
            if (m_nPrevOutPOC == m_nOutPOC && m_nOutPOC == INT_MIN) {
                m_nBrokenFramesFlag_POC++;
            }
            m_nBrokenFramesFlag++;
        } else {
            m_nBrokenFramesFlag     = 0;
            m_nBrokenFramesFlag_POC = 0;
        }
        m_nfield_pic_flag   = m_DXVAPicParams.field_pic_flag;
        m_nRefPicFlag       = m_DXVAPicParams.RefPicFlag;
        m_nPrevOutPOC       = m_nOutPOC;

        if (m_nBrokenFramesFlag > 4) {
            m_nBrokenFramesFlag = 0;
            if (m_nBrokenFramesFlag_POC > 1) {
                TRACE_H264("CDXVADecoderH264::DecodeFrame() : Detected broken frames ... flush data\n");
                m_nBrokenFramesFlag_POC = 0;
                Flush();
                return S_FALSE;
            }
        }
        //
    */

    CHECK_HR_TRACE(GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));
    FFH264SetCurrentPicture(nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());

    CHECK_HR_TRACE(BeginFrame(nSurfaceIndex, pSampleToDeliver));

    m_DXVAPicParams.StatusReportFeedbackNumber++;

    // Send picture parameters
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
    CHECK_HR_TRACE(Execute());

    // Add bitstream, slice control and quantization matrix
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

    if (m_bUseLongSlice) {
        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Long)*nSlices, m_pSliceLong));
    } else {
        CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short)*nSlices, m_pSliceShort));
    }

    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), (void*)&m_DXVAScalingMatrix));

    // Decode bitstream
    CHECK_HR_TRACE(Execute());
    CHECK_HR_TRACE(EndFrame(nSurfaceIndex));

#if defined(_DEBUG) && 0
    DisplayStatus();
#endif

    bool bAdded = AddToStore(nSurfaceIndex, pSampleToDeliver, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
                             m_DXVAPicParams.field_pic_flag, (FF_FIELD_TYPE)nFieldType,
                             (FF_SLICE_TYPE)nSliceType, nFramePOC);

    FFH264UpdateRefFramesList(&m_DXVAPicParams, m_pFilter->GetAVCtx());
    ClearUnusedRefFrames();

    if (bAdded) {
        hr = DisplayNextFrame();
    }

    if (nOutPOC != INT_MIN) {
        m_nOutPOC = nOutPOC;
        m_rtOutStart = rtOutStart;
    }

    m_bFlushed = false;
    return hr;
}
Example No. 24
int Scene::NumFrames(void)
{
	return EndFrame() - m_nextframe;
}
Example No. 25
HRESULT CDXVADecoderH264::DecodeFrame (BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{

	HRESULT					hr			= S_FALSE;

	CH264Nalu				Nalu;
	UINT					nSlices		= 0;
	int						nSurfaceIndex;
	int						nFieldType;
	int						nSliceType;
	int						nFramePOC;
	IDirect3DSurface9*		pSampleToDeliver;
	int						nDXIndex	= 0;
	UINT					nNalOffset	= 0;
	int						nOutPOC;
	REFERENCE_TIME			rtOutStart;

	if (pDataIn == NULL || nSize == 0)
		return S_FALSE;

	Nalu.SetBuffer (pDataIn, nSize, m_nNALLength);
	FFH264DecodeBuffer (m_pFilter->GetAVCtx(), pDataIn, nSize, &nFramePOC, &nOutPOC, &rtOutStart);

	//CLog::Log(LOGDEBUG, "nFramePOC = %d nOutPOC %d rtOutStart%d", nFramePOC, nOutPOC, rtOutStart);

	while (Nalu.ReadNext())
	{
		switch (Nalu.GetType())
		{
		case NALU_TYPE_SLICE:
		case NALU_TYPE_IDR:
				if(m_bUseLongSlice) 
				{
					m_pSliceLong[nSlices].BSNALunitDataLocation	= nNalOffset;
					m_pSliceLong[nSlices].SliceBytesInBuffer	= Nalu.GetDataLength()+3; //.GetRoundedDataLength();
					m_pSliceLong[nSlices].slice_id				= nSlices;
					FF264UpdateRefFrameSliceLong(&m_DXVAPicParams, &m_pSliceLong[nSlices], m_pFilter->GetAVCtx());

					if (nSlices>0)
						m_pSliceLong[nSlices-1].NumMbsForSlice = m_pSliceLong[nSlices].NumMbsForSlice = m_pSliceLong[nSlices].first_mb_in_slice - m_pSliceLong[nSlices-1].first_mb_in_slice;
				}
				nSlices++; 
				nNalOffset += (UINT)(Nalu.GetDataLength() + 3);
				if (nSlices > MAX_SLICES) break;
				break;
		}
	}
	if (nSlices == 0) return S_FALSE;

	m_nMaxWaiting	= min (max (m_DXVAPicParams.num_ref_frames, 3), 8);

	// If parsing fails (probably no PPS/SPS), continue anyway; the headers may arrive later (happens on truncated streams)
	if (FAILED (FFH264BuildPicParams (&m_DXVAPicParams, &m_DXVAScalingMatrix, &nFieldType, &nSliceType, m_pFilter->GetAVCtx(), m_pFilter->GetPCIVendor())))
		return S_FALSE;

	// Wait for an I-frame after a flush
	if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag)
		return S_FALSE;
	
	CHECK_HR (GetFreeSurfaceIndex (nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));

	FFH264SetCurrentPicture (nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());

	CHECK_HR (BeginFrame(pSampleToDeliver));
	
	m_DXVAPicParams.StatusReportFeedbackNumber++;

//	TRACE("CDXVADecoderH264 : Decode frame %u\n", m_DXVAPicParams.StatusReportFeedbackNumber);

	// Send picture parameters
	CHECK_HR (AddExecuteBuffer (DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
	CHECK_HR (Execute());

	// Add bitstream, slice control and quantization matrix
	CHECK_HR (AddExecuteBuffer (DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

	if (m_bUseLongSlice)
	{
		CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType,  sizeof(DXVA_Slice_H264_Long)*nSlices, m_pSliceLong));
	}
	else
	{
		CHECK_HR (AddExecuteBuffer (DXVA2_SliceControlBufferType, sizeof (DXVA_Slice_H264_Short)*nSlices, m_pSliceShort));
	}

	CHECK_HR (AddExecuteBuffer (DXVA2_InverseQuantizationMatrixBufferType, sizeof (DXVA_Qmatrix_H264), (void*)&m_DXVAScalingMatrix));

	// Decode bitstream
	CHECK_HR (Execute());

	CHECK_HR (EndFrame(nSurfaceIndex));

#ifdef _DEBUG
	//DisplayStatus();
#endif

	bool bAdded		= AddToStore (nSurfaceIndex, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
								  m_DXVAPicParams.field_pic_flag, (FF_FIELD_TYPE)nFieldType, 
								  (FF_SLICE_TYPE)nSliceType, nFramePOC);

	FFH264UpdateRefFramesList (&m_DXVAPicParams, m_pFilter->GetAVCtx());

	ClearUnusedRefFrames();

	if (bAdded) 
	{
		hr = DisplayNextFrame();

		if (nOutPOC != -1)
		{
			m_nOutPOC		= nOutPOC;
			m_rtOutStart	= rtOutStart;
		}
	}
	m_bFlushed		= false;
 
	return hr;
}
Example No. 26
HRESULT CDXVADecoderH264_DXVA1::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
	HRESULT					hr					= S_FALSE;
	int						nSurfaceIndex		= -1;
	int						nFramePOC			= INT_MIN;
	int						nOutPOC				= INT_MIN;
	REFERENCE_TIME			rtOutStart			= INVALID_TIME;
	CH264Nalu				Nalu;
	CComPtr<IMediaSample>	pSampleToDeliver;

	CHECK_HR_FALSE (FFH264DecodeFrame(m_pFilter->GetAVCtx(), m_pFilter->GetFrame(), pDataIn, nSize, rtStart,
					&nFramePOC, &nOutPOC, &rtOutStart, &m_nNALLength));

	// If parsing fails (probably no PPS/SPS), continue anyway; the headers may arrive later (happens on truncated streams)
	CHECK_HR_FALSE (FFH264BuildPicParams(m_pFilter->GetAVCtx(), &m_DXVAPicParams, &m_DXVAScalingMatrix, m_IsATIUVD));

	TRACE_H264 ("CDXVADecoderH264_DXVA1::DecodeFrame() : nFramePOC = %11d, nOutPOC = %11d[%11d], [%d - %d], rtOutStart = [%20I64d]\n", nFramePOC, nOutPOC, m_nOutPOC, m_DXVAPicParams.field_pic_flag, m_DXVAPicParams.RefPicFlag, rtOutStart);

	// Wait for an I-frame after a flush
	if (m_bFlushed && !m_DXVAPicParams.IntraPicFlag) {
		TRACE_H264 ("CDXVADecoderH264_DXVA1::DecodeFrame() : Flush - wait I frame\n");
		return S_FALSE;
	}

	CHECK_HR_FALSE (GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop));
	FFH264SetCurrentPicture(nSurfaceIndex, &m_DXVAPicParams, m_pFilter->GetAVCtx());

	{
		m_DXVAPicParams.StatusReportFeedbackNumber++;

		CHECK_HR_FALSE (BeginFrame(nSurfaceIndex, pSampleToDeliver));
		// Send picture parameters
		CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_DXVAPicParams), &m_DXVAPicParams));
		// Add bitstream
		CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn));
		// Add quantization matrix
		CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(DXVA_Qmatrix_H264), &m_DXVAScalingMatrix));
		// Add slice control
		CHECK_HR_FALSE (AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_Slice_H264_Short) * m_nSlices, m_pSliceShort));
		// Decode frame
		CHECK_HR_FALSE (Execute());
		CHECK_HR_FALSE (EndFrame(nSurfaceIndex));
	}

	bool bAdded = AddToStore(nSurfaceIndex, pSampleToDeliver, m_DXVAPicParams.RefPicFlag, rtStart, rtStop,
							 m_DXVAPicParams.field_pic_flag, nFramePOC);


	FFH264UpdateRefFramesList(&m_DXVAPicParams, m_pFilter->GetAVCtx());
	ClearUnusedRefFrames();

	if (bAdded) {
		hr = DisplayNextFrame();
	}

	if (nOutPOC != INT_MIN) {
		m_nOutPOC		= nOutPOC;
		m_rtOutStart	= rtOutStart;
	}

	m_bFlushed = false;
	return hr;
}
Example No. 27
void CInputManagerImplementation::LoadCommandsFromFile(const std::string& path)
{
	m_FileName = path;

	m_Actions.clear();
	m_Axis.clear();


	CXMLTreeNode l_XML;
	if (l_XML.LoadFile(path.c_str()))
	{
		CXMLTreeNode l_Input = l_XML["input"];
		if (l_Input.Exists())
		{
			for (int i = 0; i < l_Input.GetNumChildren(); ++i)
			{
				CXMLTreeNode l_Element = l_Input(i);

				if (l_Element.GetName() == std::string("action"))
				{
					CXMLTreeNode &l_Action = l_Element;

					Action action = {};
					action.name = l_Action.GetPszProperty("name");
					std::string type = l_Action.GetPszProperty("type", "KEYBOARD");
					action.inputType = ParseInputType(type);
					action.mode = ParseMode(l_Action.GetPszProperty("mode", "ON_PRESS", false));

					action.triggersAxis = l_Action.GetBoolProperty("triggers_axis", false, false);
					action.axisName = l_Action.GetPszProperty("axis", "", false);
					action.axisValue = l_Action.GetFloatProperty("axis_value", 1, false);

					switch (action.inputType)
					{
					case KEYBOARD:

						action.keyboard.key = l_Action.GetPszProperty("key", "A")[0];
						action.keyboard.needsAlt = l_Action.GetBoolProperty("needs_alt", false, false);
						action.keyboard.needsCtrl = l_Action.GetBoolProperty("needs_ctrl", false, false);
						break;
					case MOUSE:
						action.mouse.button = ParseMouseButton(l_Action.GetPszProperty("mouse_button", "LEFT"));
						break;
						// TODO: Mouse and Gamepad

						// Hint: to parse gamepad buttons, use the constants below;
						//   you can have more than one at a time by combining them, e.g. "XINPUT_GAMEPAD_A | XINPUT_GAMEPAD_B",
						//   but for that to work well with ON_PRESS you will need to improve this class's logic or be frame-perfect when pressing buttons

						//  XINPUT_GAMEPAD_DPAD_UP       
						//  XINPUT_GAMEPAD_DPAD_DOWN     
						//  XINPUT_GAMEPAD_DPAD_LEFT     
						//  XINPUT_GAMEPAD_DPAD_RIGHT    
						//  XINPUT_GAMEPAD_START         
						//  XINPUT_GAMEPAD_BACK          
						//  XINPUT_GAMEPAD_LEFT_THUMB    
						//  XINPUT_GAMEPAD_RIGHT_THUMB   
						//  XINPUT_GAMEPAD_LEFT_SHOULDER 
						//  XINPUT_GAMEPAD_RIGHT_SHOULDER
						//  XINPUT_GAMEPAD_A             
						//  XINPUT_GAMEPAD_B             
						//  XINPUT_GAMEPAD_X             
						//  XINPUT_GAMEPAD_Y             
						// 
					}

					m_Actions.push_back(action);
				}
				else if (l_Element.GetName() == std::string("axis"))
				{

					CXMLTreeNode &l_Axis = l_Element;

					Axis axis = {};
					axis.name = l_Axis.GetPszProperty("name");
					std::string type = l_Axis.GetPszProperty("type", "MOUSE");
					axis.inputType = ParseInputType(type);

					switch (axis.inputType)
					{
					case GAMEPAD:
						axis.gamepad.axis = ParseGamepadAxis(l_Axis.GetPszProperty("gamepad_axis", "X"));
						axis.gamepad.gamepadNumber = l_Axis.GetIntProperty("gamepad_number", 0, false);
						break;
					case MOUSE:
						axis.mouse.axis = ParseMouseAxis(l_Axis.GetPszProperty("mouse_axis", "X"));
						axis.mouse.scale = l_Axis.GetFloatProperty("mouse_scale", 1, false);
						break;
					}

					m_Axis.push_back(axis);
					// TODO: parse axis
				}

			}
		}
	}

	EndFrame();
}
Example No. 28
// === Public functions
HRESULT CDXVADecoderMpeg2::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT hr;
    int nFieldType;
    int nSliceType;

    FFMpeg2DecodeFrame(&m_PictureParams, &m_QMatrixData, m_SliceInfo, &m_nSliceCount, m_pFilter->GetAVCtx(),
                       m_pFilter->GetFrame(), &m_nNextCodecIndex, &nFieldType, &nSliceType, pDataIn, nSize);

    if (m_PictureParams.bSecondField && !m_bSecondField) {
        m_bSecondField = true;
    }

    // Wait for an I-frame after a flush
    if (m_bFlushed && (!m_PictureParams.bPicIntra || (m_bSecondField && m_PictureParams.bSecondField))) {
        TRACE_MPEG2("CDXVADecoderMpeg2::DecodeFrame() : Flush - wait I frame\n");
        return S_FALSE;
    }

    if (m_bSecondField) {
        if (!m_PictureParams.bSecondField) {
            m_rtStart = rtStart;
            m_rtStop  = rtStop;
            m_pSampleToDeliver = NULL;
            hr = GetFreeSurfaceIndex(m_nSurfaceIndex, &m_pSampleToDeliver, rtStart, rtStop);
            if (FAILED(hr)) {
                ASSERT(hr == VFW_E_NOT_COMMITTED);      // Normal when stop playing
                return hr;
            }
        }
    } else {
        m_rtStart = rtStart;
        m_rtStop  = rtStop;
        m_pSampleToDeliver = NULL;
        hr = GetFreeSurfaceIndex(m_nSurfaceIndex, &m_pSampleToDeliver, rtStart, rtStop);
        if (FAILED(hr)) {
            ASSERT(hr == VFW_E_NOT_COMMITTED);      // Normal when stop playing
            return hr;
        }
    }

    if (m_pSampleToDeliver == NULL) {
        return S_FALSE;
    }

    CHECK_HR_TRACE(BeginFrame(m_nSurfaceIndex, m_pSampleToDeliver));

    if (m_bSecondField) {
        if (!m_PictureParams.bSecondField) {
            UpdatePictureParams(m_nSurfaceIndex);
        }
    } else {
        UpdatePictureParams(m_nSurfaceIndex);
    }

    TRACE_MPEG2("CDXVADecoderMpeg2::DecodeFrame() : Surf = %d, PictureType = %d, SecondField = %d, m_nNextCodecIndex = %d, rtStart = [%I64d]\n",
                m_nSurfaceIndex, nSliceType, m_PictureParams.bSecondField, m_nNextCodecIndex, rtStart);

    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_InverseQuantizationMatrixBufferType, sizeof(m_QMatrixData), &m_QMatrixData));

    // Send bitstream to accelerator
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(DXVA_SliceInfo)*m_nSliceCount, &m_SliceInfo));
    CHECK_HR_TRACE(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize, pDataIn, &nSize));

    // Decode frame
    CHECK_HR_TRACE(Execute());
    CHECK_HR_TRACE(EndFrame(m_nSurfaceIndex));

    if (m_bSecondField) {
        if (m_PictureParams.bSecondField) {
            AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), m_rtStart, m_rtStop,
                       false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));
            hr = DisplayNextFrame();
        }
    } else {
        AddToStore(m_nSurfaceIndex, m_pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), m_rtStart, m_rtStop,
                   false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, FFGetCodedPicture(m_pFilter->GetAVCtx()));
        hr = DisplayNextFrame();
    }

    m_bFlushed = false;

    return hr;
}
Example No. 29
void KX_TouchSensor::UnregisterToManager()
{
	// before unregistering the sensor, make sure we release all references
	EndFrame();
	SCA_ISensor::UnregisterToManager();
}
Example No. 30
// === Public functions
HRESULT TDXVADecoderVC1::DecodeFrame(BYTE* pDataIn, UINT nSize, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop)
{
    HRESULT               hr;
    int                   nSurfaceIndex;
    CComPtr<IMediaSample> pSampleToDeliver;
    int                   nFieldType, nSliceType;
    UINT                  nFrameSize, nSize_Result;

    m_pCodec->libavcodec->FFVC1UpdatePictureParam(&m_PictureParams, m_pCodec->avctx, &nFieldType, &nSliceType, pDataIn, nSize, &nFrameSize, FALSE, &m_bFrame_repeat_pict);

    if (m_pCodec->libavcodec->FFIsSkipped(m_pCodec->avctx)) {
        return S_OK;
    }

    // Wait for an I-frame after a flush
    if (m_bFlushed && ! m_PictureParams.bPicIntra) {
        return S_FALSE;
    }

    hr = GetFreeSurfaceIndex(nSurfaceIndex, &pSampleToDeliver, rtStart, rtStop);
    if (FAILED(hr)) {
        ASSERT(hr == VFW_E_NOT_COMMITTED);  // Normal when stop playing
        return hr;
    }

    CHECK_HR(BeginFrame(nSurfaceIndex, pSampleToDeliver));

    DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - PictureType = %s, rtStart = %I64d  Surf=%d\n"), m_pCodec->libavcodec->GetFFMpegPictureType(nSliceType), rtStart, nSurfaceIndex);

    m_PictureParams.wDecodedPictureIndex    = nSurfaceIndex;
    m_PictureParams.wDeblockedPictureIndex  = m_PictureParams.wDecodedPictureIndex;

    // Manage reference picture list
    if (!m_PictureParams.bPicBackwardPrediction) {
        if (m_wRefPictureIndex[0] != NO_REF_FRAME) {
            RemoveRefFrame(m_wRefPictureIndex[0]);
        }
        m_wRefPictureIndex[0] = m_wRefPictureIndex[1];
        m_wRefPictureIndex[1] = nSurfaceIndex;
    }
    m_PictureParams.wForwardRefPictureIndex  = (m_PictureParams.bPicIntra == 0)                ? m_wRefPictureIndex[0] : NO_REF_FRAME;
    m_PictureParams.wBackwardRefPictureIndex = (m_PictureParams.bPicBackwardPrediction == 1) ? m_wRefPictureIndex[1] : NO_REF_FRAME;

    m_PictureParams.bPic4MVallowed       = (m_PictureParams.wBackwardRefPictureIndex == NO_REF_FRAME && m_PictureParams.bPicStructure == 3) ? 1 : 0;
    m_PictureParams.bPicDeblockConfined |= (m_PictureParams.wBackwardRefPictureIndex == NO_REF_FRAME) ? 0x04 : 0;

    m_PictureParams.bPicScanMethod++; // Use for status reporting sections 3.8.1 and 3.8.2

    DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - Decode frame %i\n"), m_PictureParams.bPicScanMethod);

    // Send picture params to accelerator
    CHECK_HR(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

    // Send bitstream to accelerator
    CHECK_HR(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nFrameSize ? nFrameSize : nSize, pDataIn, &nSize_Result));

    m_SliceInfo.wQuantizerScaleCode = 1;    // TODO : 1->31 ???
    m_SliceInfo.dwSliceBitsInBuffer = nSize_Result * 8;
    CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(m_SliceInfo), &m_SliceInfo));

    // Decode frame
    CHECK_HR(Execute());
    CHECK_HR(EndFrame(nSurfaceIndex));

    // ***************
    if (nFrameSize) { // Decoding Second Field
        m_pCodec->libavcodec->FFVC1UpdatePictureParam(&m_PictureParams, m_pCodec->avctx, NULL, NULL, pDataIn, nSize, NULL, TRUE, &m_bFrame_repeat_pict);

        CHECK_HR(BeginFrame(nSurfaceIndex, pSampleToDeliver));

        DPRINTF(_l("TDXVADecoderVC1::DecodeFrame - PictureType = %s\n"), m_pCodec->libavcodec->GetFFMpegPictureType(nSliceType));

        CHECK_HR(AddExecuteBuffer(DXVA2_PictureParametersBufferType, sizeof(m_PictureParams), &m_PictureParams));

        // Send bitstream to accelerator
        CHECK_HR(AddExecuteBuffer(DXVA2_BitStreamDateBufferType, nSize - nFrameSize, pDataIn + nFrameSize, &nSize_Result));

        m_SliceInfo.wQuantizerScaleCode = 1;        // TODO : 1->31 ???
        m_SliceInfo.dwSliceBitsInBuffer = nSize_Result * 8;
        CHECK_HR(AddExecuteBuffer(DXVA2_SliceControlBufferType, sizeof(m_SliceInfo), &m_SliceInfo));

        // Decode frame
        CHECK_HR(Execute());
        CHECK_HR(EndFrame(nSurfaceIndex));
    }
    // ***************

#ifdef _DEBUG
    DisplayStatus();
#endif

    // Re-order B frames
    if (m_pCodec->isReorderBFrame()) {
        if (m_PictureParams.bPicBackwardPrediction == 1) {
            SwapRT(rtStart, m_rtStartDelayed);
            SwapRT(rtStop,  m_rtStopDelayed);
        } else {
            // Save I or P reference time (swap later)
            if (!m_bFlushed) {
                if (m_nDelayedSurfaceIndex != -1) {
                    UpdateStore(m_nDelayedSurfaceIndex, m_rtStartDelayed, m_rtStopDelayed);
                }
                m_rtStartDelayed = m_rtStopDelayed = _I64_MAX;
                SwapRT(rtStart, m_rtStartDelayed);
                SwapRT(rtStop,  m_rtStopDelayed);
                m_nDelayedSurfaceIndex = nSurfaceIndex;
            }
        }
    }

    AddToStore(nSurfaceIndex, pSampleToDeliver, (m_PictureParams.bPicBackwardPrediction != 1), rtStart, rtStop,
               false, (FF_FIELD_TYPE)nFieldType, (FF_SLICE_TYPE)nSliceType, 0);

    m_bFlushed = false;

    return DisplayNextFrame();
}