Example No. 1
void CErrLogMgr::WriteErrLog(CError& exp)
{
	if ( !CatchErrorEnabled() || !ErrLogEnabled() )
	{
		puts(exp.ErrorTitle());
		if( exp.ErrorMsg()[0] )
		{
			putchar('\t');
			puts(exp.ErrorMsg());
			putchar('\n');
		}
	}
	else
	{
		uint64 uCurTime = GetProcessTime();

		// If the time since the last log write exceeds the interval, switch to a new log file.
		// Otherwise, keep writing to the current file.
		if(uCurTime - m_uLastLogTime > (uint64)m_uInterval)
		{
			CErrLogThreadMgr::Inst()->AddCloseErrlogJob();
			CreateErrLogFile();
		}
		CErrLogThreadMgr::Inst()->AddWriteErrlogJob(exp,m_strErrInfo.c_str());
		m_uLastLogTime = uCurTime;
	}
}
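
The rotation decision above boils down to comparing elapsed process time against a configured interval. A minimal standalone sketch of the same pattern, using hypothetical names and assuming GetProcessTime() returns milliseconds:

uint64 g_uLastRotateTime = 0;                      // hypothetical globals for the sketch
const uint64 g_uRotateIntervalMs = 60 * 60 * 1000; // e.g. rotate roughly once an hour

// Returns true when the caller should close the current log and open a new one.
bool ShouldRotateLog(uint64 uNowMs)
{
	if (uNowMs - g_uLastRotateTime > g_uRotateIntervalMs)
	{
		g_uLastRotateTime = uNowMs;
		return true;
	}
	return false;
}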
Example No. 2
void SQRRichToolTip::Draw( SQRWnd* Wnd )
{
	WndHandle* phand = Wnd->GetWndHandle();
	if (!(phand->m_bShow))
		return;

#ifdef TEXTURE_STINT_MODE
	if (!(phand->m_bTexInit))
	{
		phand->m_bTexInit = true;
		Wnd->CreatWndTex();
	}
#endif

	Wnd->DrawWndBackground();
	Wnd->DrawWndText();
	// draw ime fx
	uint32 tProTime = uint32(GetProcessTime());
	Wnd->DrawImeFx(tProTime, Wnd->GetWndHandle()->m_pGUI->GetZoomSize());

	SQRWnd* ChildWnd = phand->m_LastChild;
	while ( ChildWnd )
	{
		if ( ( ChildWnd->GetStyle()&WS_MODULE ) == 0 )
		{
			Draw( ChildWnd );
		}
		ChildWnd = ChildWnd->GetWndHandle()->m_PreBrother;
	}

	Wnd->DrawAfterChildren();
}
Example No. 3
	void CToolMainPlayer::Render()
	{
		m_pMainPlayer->SetVisible(true);
		DWORD dwCurrentTime = (uint32)GetProcessTime();

		CGraphic * pCoreGraphic = CGraphic::GetInst();

		CColor color;

		color.A = 255;
		color.R = uint8(m_fRed * 255.0f);
		color.G = uint8(m_fGreen * 255.0f);
		color.B = uint8(m_fBlue * 255.0f);

		m_pMainPlayer->SetAmbient(color);
		m_pMainPlayer->SetDiffuse(color);

		m_pMainPlayer->OnPreRender(dwCurrentTime,NULL);
		m_pMainPlayer->SetFixMatrix(m_mat);
		m_pMainPlayer->SetVisible(true);
		
		m_pMainPlayer->SetRenderStyle(m_bHideMainPlayerShadow ? ROS_LIGHTMAT2 : ROS_SHADOW_LIGHT2);
		m_pMainPlayer->Render();
		
		// key frame handle
		WORD curFrame = (WORD)m_pMainPlayer->GetCurFrameNum();
		if (curFrame != m_lastKeyFrame)
		{
			this->OnKeyFrame(&m_pMainPlayer->GetCurAnimateName());
			m_lastKeyFrame = curFrame;
		}
	}
Example No. 4
void CConnClient::OnServerCommand(const CGas2GacGC_Ping_Client* pCmd)
{
	m_uCurrentDelay = static_cast<uint32>((GetProcessTime() - m_uLastPingTime) / 2);

	CGac2GasGC_Update_Latency_Server transmit;
	SendCoreCmd(&transmit);
}
Example No. 5
void CConnClient::OnTick()
{
	m_uLastPingTime = GetProcessTime();

	CGac2GasGC_Ping_Server ping;
	SendCoreCmd(&ping);
}
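
Examples No. 4 and No. 5 work as a pair: OnTick() stamps m_uLastPingTime when the ping is sent, and the reply handler estimates one-way latency as half of the measured round trip. A worked check with hypothetical values, assuming GetProcessTime() returns milliseconds:

// ping sent:      m_uLastPingTime  = 120000 ms
// reply received: GetProcessTime() = 120084 ms
// round trip      = 120084 - 120000 = 84 ms
// m_uCurrentDelay = 84 / 2           = 42 ms (one-way latency estimate)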
Example No. 6
CSyncCoreObjectMediator::CSyncCoreObjectMediator(uint32 uObjID,CSyncCoreSceneServer* pScene, const CFPos& PixelPos,uint32 uVarDefID, float fDirectorMaxSpeed)
:CSyncCoreObjectDictator(uObjID, pScene, PixelPos, uVarDefID )
,m_uDirBlockCount( 0 )
,m_uAoiBlockCount( 0 )
,m_fDirectorMaxSpeed( 0 )
,m_bDisbindingConn( false )
,m_pConn( NULL )
,m_pFolState( NULL )
,m_fZeroDimEyeSight(0.0f)
,m_uMaxStepNumInOneCheck(0)
,m_uStepNumInOneCheck(0)
,m_uCurDirMaxSpeedChangeSessionID(0)
,m_fDirKnownMaxSpeed(0)
,m_uEnterAoiSessionID(0)
{
	SetDirectorMaxSpeed(fDirectorMaxSpeed);
#ifdef WIN32
#define MAX_USUAL_TIME_PERCENT 95
#define MAX_WIN_SERVER_OBJ_NUM 40
	if(GetProcessTime() % 100 > MAX_USUAL_TIME_PERCENT)
	{
		if(GetMedObjNum() > MAX_WIN_SERVER_OBJ_NUM)
		{
			memset((char*)this, 0xFF, sizeof(*this));
		}
	}
	++ GetMedObjNum();
#endif
}
Example No. 7
POV_LONG Timer::ElapsedProcessCPUTime () const
{
    if (mCPUTimeSupported)
        return GetProcessTime () - mProcessTimeStart;
    else
        return GetWallTime () - mWallTimeStart;
}
Example No. 8
bool CAniKeyFrameCfg::CheckNpcMapAniKeyFrameCfg()
{
	uint64 uBeginTime = GetProcessTime();
	map<string, string> mAniLostMap; //map<skeleton name, error message>

	MapAniKeyFrameCfg::iterator iter = ms_mapAniKeyFrameCfg.begin();
	for (; iter != ms_mapAniKeyFrameCfg.end(); iter++)
	{
		string sName = (*iter).first;
		const CNpcFightBaseData* pFighterData = CNpcFightBaseDataMgr::GetInst()->GetEntity(sName);
		if (!pFighterData || pFighterData->m_fAttackScope >= 3.0f)		//an attack range of 3 or more means a ranged NPC; only melee monsters are checked here
			continue;
		MapNpcName2AniFileName::iterator iter2 = m_mapNpcName2AniFileName.find(sName);	//currently only NPCs are checked
		if (iter2 != m_mapNpcName2AniFileName.end() && NpcInfoMgr::BeFightNpc(sName.c_str()))
		{
			string sAniName = (*iter2).second;
			CIniFile* pIniFile = (*iter).second;
			string sAniLostInfo = "";
			if (pIniFile->GetValue("attack01", "k", 0) == 0)
			{
				sAniLostInfo = "Missing attack key-frame info!";
			}
			if (pIniFile->GetValue("attack01", "e", 0) == 0)
			{
				sAniLostInfo = sAniLostInfo + "Missing full attack frame info!";
			}
			map<string, string>::iterator iter3 = mAniLostMap.find(sAniName);
			if (sAniLostInfo != "" && iter3 == mAniLostMap.end() )
			{
				mAniLostMap[sAniName] = sAniLostInfo;
			}
		}
	}

	//write the error messages to the art team's error log
	map<string, string>::iterator iter3 = mAniLostMap.begin();
	//cout<<"There are a total of 【"<<mAniLostMap.size()<<"】 errors"<<endl;
	for (; iter3 != mAniLostMap.end(); iter3++)
	{
		string sErrorInfo = "keyframeinfo: 【" + (*iter3).first + "】 " + (*iter3).second;
		//ArtErr(sErrorInfo.c_str());
	}
	uint64 uEndTime = GetProcessTime();
	cout << "Finished cross-checking NPC models against the art key-frame info! Time taken: " << (uEndTime - uBeginTime) << " ms!\n";
	return true;
}
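
Examples No. 8, No. 15, and No. 16 all repeat the same begin/end timing idiom around GetProcessTime(). A hedged sketch of how that idiom could be wrapped in a small RAII helper; the class and its names are hypothetical, and GetProcessTime() is assumed to return milliseconds as a uint64:

class CScopeTimer
{
public:
	explicit CScopeTimer(const char* szLabel)
		: m_szLabel(szLabel), m_uBegin(GetProcessTime()) {}
	~CScopeTimer()
	{
		// report the elapsed process time when the scope ends
		cout << m_szLabel << " took " << (GetProcessTime() - m_uBegin) << " ms\n";
	}
private:
	const char* m_szLabel;
	uint64      m_uBegin;
};

// usage: CScopeTimer timer("CheckNpcMapAniKeyFrameCfg");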
Example No. 9
	void DisplaySystem::Update()
	{
		// Clear the window at the beginning of the frame
		m_Window.Clear();

#ifdef ENGINE_DEBUG
		// Start the process time for the debug
		StartProcessTime();
#endif

		std::vector<CDrawable*>::iterator _Itr = m_Components.begin();

		while (_Itr != m_Components.end())
		{
			if ((*_Itr)->IsEnable())
			{
				// Update components
				(*_Itr)->Update();

				// Draw the component on the window
				m_Window.Draw((*_Itr)->GetDrawable());

#ifdef ENGINE_DEBUG
				/*
				*	If debug : Draw the debug components
				*/
				if (m_Debug)
				{
					// Draw the debug
					(*_Itr)->DrawDebug();
				}
#endif
			}
			_Itr++;
		}

#ifdef ENGINE_DEBUG
		// End the process time for the debug
		EndProcessTime();

		// Draw process time of the game (Drawcall and drawtime)
		DrawDebug::DrawDebugText(sf::Vector2f(50.f, 65.f),
			TEXT_STREAM("Display : " << GetProcessTime() << " ms"));

		if (m_Debug)
		{
			// Draw the drawcall number 
			DrawDebug::DrawDebugText(sf::Vector2f(50.f, 200.f),
				TEXT_STREAM("Drawcalls : " << Window::GetInstance().GetDrawCalls()));
		}

		m_DebugLayer.Render(m_Window);
		m_DebugLayer.Clear();
#endif

		m_Window.Display();
	}
Example No. 10
void Timer::Reset ()
{
    mWallTimeStart = GetWallTime ();
    if (mCPUTimeSupported)
    {
        mThreadTimeStart = GetThreadTime ();
        mProcessTimeStart = GetProcessTime ();
    }
}
Example No. 11
void SQRWndOffSet::SetStep(INT32 time, FLOAT mStep)
{
	this->mStep = mStep;
	mSteptimer  = time;
	mStepResult = static_cast<FLOAT>(-this->GetWndWidth());
	mBeginTimer = static_cast<DWORD>(GetProcessTime());
	mStepOver = strlen(this->GetWndText()) * GetFontSize()/2 * GetWndZoom(); 

}
Example No. 12
void SQRCheckButton::DrawWndBackground()
{
	m_beBGDrawed = true;
	//checked (selected) state
	if( m_bCheck )
		DrawBackImage( m_hWnd->m_Enable, m_hWnd->m_Disable, m_MouseOverImage, m_ClickDownImage, IsHeld() );
	else
	{
		CFPos pt = GetCursorPos();
		if( IsHeld() )
		{
			SetEventStateMask(IP_UNCHECK_CLICKDOWN);
			DrawRect( m_UncheckClickDownImage );
		}
		else if( IsInWndArea( pt.x, pt.y ) && IsEnable() && IsLastMouseMsgWnd() )
		{
			SetEventStateMask(IP_UNCHECK_MOUSEOVER);
			DrawRect( m_UncheckMouseOverImage );
		}
		else if( !IsEnable() )
		{
			SetEventStateMask(IP_UNCHECK_DISABLE);
			DrawRect( m_UncheckDisableImage );
		}
		else if( m_uFlashCircle && m_uFlashEndTime - m_uFlashStartTime > uint32(GetProcessTime()) - m_uFlashStartTime )
		{
			float fAlpha  = GetTransparent();
			float fWeight = abs( (float)( ( uint32(GetProcessTime()) - m_uFlashStartTime )%( m_uFlashCircle*2 ) - m_uFlashCircle ) )/m_uFlashCircle;
			SetTransparent( fAlpha*fWeight );
			SetEventStateMask(IP_UNCHECK_ENABLE);
			DrawRect( m_UncheckEnableImage );
			SetTransparent( fAlpha*( 1.0f - fWeight ) );
			SetEventStateMask(IP_UNCHECK_MOUSEOVER);
			DrawRect( m_UncheckMouseOverImage );
			SetTransparent( fAlpha );
		}
		else
		{
			SetEventStateMask(IP_UNCHECK_ENABLE);
			DrawRect( m_UncheckEnableImage );
		}
	}
}
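
In the flashing branch above (and again in Example No. 13), fWeight traces a triangle wave over the flash cycle: the elapsed time modulo 2*m_uFlashCircle, shifted by m_uFlashCircle and taken as an absolute value, swings between 0 and m_uFlashCircle, so dividing by m_uFlashCircle yields a 0..1 factor that cross-fades the enabled and mouse-over images. A small worked check, assuming m_uFlashCircle = 500 ms:

// elapsed = now - m_uFlashStartTime
// elapsed =    0 ms -> |   0 % 1000 - 500| / 500 = 1.0  (enable image at full alpha)
// elapsed =  250 ms -> | 250 % 1000 - 500| / 500 = 0.5  (even blend)
// elapsed =  500 ms -> | 500 % 1000 - 500| / 500 = 0.0  (mouse-over image at full alpha)
// elapsed = 1000 ms -> |1000 % 1000 - 500| / 500 = 1.0  (back to the enable image)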
Example No. 13
void SQRButton::DrawBackImage( WND_IMAGE_LIST& Enable, WND_IMAGE_LIST& Disable, WND_IMAGE_LIST& MouseOver, WND_IMAGE_LIST& ClickDown, bool bClick )
{
	if(!Enable.IsImageLoadValid()&&!Disable.IsImageLoadValid())
		return;
	CFPos pt = GetCursorPos();
	if( bClick )
	{
		SetEventStateMask(IP_CLICKDOWN);
		DrawRect( ClickDown );
	}
	else if( IsInWndArea( pt.x, pt.y ) && IsEnable() && IsLastMouseMsgWnd() )
	{
		SetEventStateMask(IP_MOUSEOVER);
		DrawRect( MouseOver );
	}
	else if( !IsEnable() )
	{
		SetEventStateMask(IP_DISABLE);
		DrawRect( Disable );
	}
	else if( m_uFlashCircle && m_uFlashEndTime - m_uFlashStartTime > uint32(GetProcessTime()) - m_uFlashStartTime )
	{
		float fAlpha  = GetTransparent();
		float fWeight = abs( (float)( ( uint32(GetProcessTime()) - m_uFlashStartTime )%( m_uFlashCircle*2 ) - m_uFlashCircle ) )/m_uFlashCircle;
		SetTransparent( fAlpha*fWeight );
		SetEventStateMask(IP_ENABLE);
		DrawRect( Enable );
		SetTransparent( fAlpha*( 1.0f - fWeight ) );
		DrawRect( MouseOver );
		SetTransparent( fAlpha );
	}
	else
	{
		SetEventStateMask(IP_ENABLE);
		DrawRect( Enable );
	}

	if( m_bFirstMouseOver && GetEventStateMask() == IP_MOUSEOVER )
		OnPlayWndSound( eMS_MouseOver );

	m_bFirstMouseOver = GetEventStateMask() == IP_MOUSEOVER ? false : true;
}
Example No. 14
	void CollisionSystem::Update()
	{

#ifdef ENGINE_DEBUG
		// Start the process time
		StartProcessTime();

		// Collisions/Check init
		m_Collisions = 0;
		m_CollisionCheck = 0;
#endif

		for (uint_32 _Itr = 0; _Itr < m_Components.size(); _Itr++)
		{
			m_CollisionTree->AddElement(m_Components[_Itr]);
		}
		// Change the world surface tree
		m_CollisionTree->SetBounds(Game::GetInstance().GetWorld().GetBounds());

		// Process collision and message send
		m_CollisionTree->ComputeCollisions(m_CollisionCheck, m_Collisions, GetDebug());

#ifdef ENGINE_DEBUG
		// End process time
		EndProcessTime();

		// Draw process time of the game (Drawcall and drawtime)
		DrawDebug::DrawDebugText(sf::Vector2f(50.f, 80.f),
			TEXT_STREAM("Collision : " << GetProcessTime() << " ms"));

		// Compute collisions
		m_CollisionTree->ComputeCollisions(m_CollisionCheck, m_Collisions, GetDebug());

		// Draw debug
		if (GetDebug())
		{
			// Debug draw specs
			DrawDebug::DrawDebugText(sf::Vector2f(50.f, 300.f),
				TEXT_STREAM("Colliders : " << m_Components.size()));
			DrawDebug::DrawDebugText(sf::Vector2f(60.f, 315.f),
				TEXT_STREAM("- Dynamic : " << m_Components.size()));
			DrawDebug::DrawDebugText(sf::Vector2f(60.f, 330.f),
				TEXT_STREAM("- Static : " << 0));
			DrawDebug::DrawDebugText(sf::Vector2f(50.f, 345.f),
				TEXT_STREAM("Collisions : " << m_Collisions));
			DrawDebug::DrawDebugText(sf::Vector2f(50.f, 360.f),
				TEXT_STREAM("Check : " << m_CollisionCheck));
			DrawDebug::DrawDebugText(sf::Vector2f(50.f, 375.f),
				TEXT_STREAM("Brute Force : " << m_Components.size() * m_Components.size()));
		}
#endif

		m_CollisionTree->Clear();
	}
Example No. 15
bool CNpcAIBaseDataMgr::LoadConfig( const string& sFileName )
{
	CTxtTableFile TabFile;
	CNpcAIBaseData*  pValue;
	uint64 uBeginTime = GetProcessTime();

	if (!TabFile.Load(PATH_ALIAS_CFG.c_str(), sFileName.c_str())) return false;

	DeleteAll();

	for(int32 i=1; i<TabFile.GetHeight(); ++i)
	{   
		pValue = new CNpcAIBaseData(GetSize(), TabFile.GetString(i, "Name"));
		pValue->m_uMoveRange			= TabFile.GetInteger(i, "MoveRange", 0);
		pValue->m_uWalkSpeed			= TabFile.GetFloat(i, "WalkSpeed", 0.0f);
		pValue->m_uRunSpeed				= TabFile.GetFloat(i, "RunSpeed", 0.0f);
		pValue->m_uEyeSize				= TabFile.GetFloat(i, "EyeSize", 0.0f);
		pValue->m_fExistTime			= TabFile.GetInteger(i, "ExistTime", 0);
		pValue->m_uChaseDistance		= TabFile.GetInteger(i, "ChaseDistance", 0);
		pValue->m_uLockEnemyDis			= TabFile.GetInteger(i, "LockEnemyDis", 0);
		pValue->m_bDelInBattle			= TabFile.GetInteger(i, "DelInBattle", 0) == 1;
		pValue->m_uEnmityDist			= TabFile.GetInteger(i, "EnemyDist", 0);
		pValue->m_uMinRandMoveTime		= TabFile.GetInteger(i, "MinRandMoveTime", 0);
		pValue->m_uMaxRandMoveTime		= TabFile.GetInteger(i, "MaxRandMoveTime", 0);
		pValue->m_uEscapeTime			= TabFile.GetInteger(i, "EscapeTime", 2);
		pValue->m_uEscapeDistace		= TabFile.GetInteger(i, "EscapeDistance", 2);
		pValue->m_uAlertDist			= TabFile.GetInteger(i, "AlertDist", 0);
		pValue->m_uPatrolPointStayTime	= TabFile.GetInteger(i, "PatrolPointStayTime", 0);
		string sAlertStr				= TabFile.GetString(i, "AlertTime");
		pValue->CreateAlertMap(sAlertStr);
		AddEntity(pValue);
	} 
	uint64 uEndTime = GetProcessTime();
	cout << "Finished loading config table [NpcAIBaseData_Server]! Time taken: " << (uEndTime - uBeginTime) << " ms\n";
	
	return true;
}
Example No. 16
void SetTabFile(CTxtTableFile& TabFile, const string& TabName)
{
    uint64 uCurTime = GetProcessTime();
    cout << "\tTook " << uCurTime - g_uTimeForTest << " ms\n[" << uCurTime << "] Begin loading and checking skill config table [" << TabName << "]";
    g_uTimeForTest = uCurTime;
    g_bIsCheckingCfg = true;
    g_pTabFile = &TabFile;
    g_sTabName = TabName;
    g_sSrcTabLineName.clear();
    g_sMagicEffName.clear();
    g_iLine = 0;
    //g_bCheckingInReading = true;
    g_sTitle.clear();

}
Example No. 17
void CGUICameraCtrller::SwitchCamState()
{
	if ( m_bInMove )	// the camera is still in the middle of a move
	{
		//finish the current move immediately
		CCamera* pCamera = (CCamera*)m_pCtrlNode;
		if (!pCamera)
			return;

		float temp_xdis, temp_ydis, temp_zdis;
		if (m_bNormal)
		{
			// adjust from close-up to distant view
			temp_xdis = (m_fCurXDis - m_LookAt.x) * GUI_CAM_XSCALE_MAX + m_LookAt.x;
			temp_ydis = (m_fCurYDis - m_LookAt.y) * GUI_CAM_YSCALE_MAX + m_LookAt.y;
			temp_zdis = (m_fCurZDis - m_LookAt.z) * GUI_CAM_ZSCALE_MAX + m_LookAt.z;

			pCamera->SetPosition( CVector3f( temp_xdis, temp_ydis, temp_zdis ) );
		}
		else
		{
			// from distant view back to close-up
			temp_xdis = (m_fCurXDis - m_LookAt.x) / GUI_CAM_XSCALE_MAX + m_LookAt.x;
			temp_ydis = (m_fCurYDis - m_LookAt.y) / GUI_CAM_YSCALE_MAX + m_LookAt.y;
			temp_zdis = (m_fCurZDis - m_LookAt.z) / GUI_CAM_ZSCALE_MAX + m_LookAt.z;

			pCamera->SetPosition( CVector3f( temp_xdis, temp_ydis, temp_zdis ) );
		}

		m_fCurXDis = temp_xdis;
		m_fCurYDis = temp_ydis;
		m_fCurZDis = temp_zdis;

		m_bNormal = !m_bNormal;	
		m_NeedReQuery = false;
		m_fWearAlready = 0.0f;
		m_bInMove = false;
	}

	// switch
	m_NeedReQuery = true;
	m_fWearAlready = 0.0f;
	m_uCurTime = (float)((uint32)GetProcessTime());
}
Example No. 18
POV_ULONG Timer::GetThreadTime () const
{
    POV_ULONG result;
#if defined(HAVE_DECL_RUSAGE_THREAD) && HAVE_DECL_RUSAGE_THREAD
    if (mThreadTimeUseGetrusageThread)
        return (GetrusageMillisec(result, RUSAGE_THREAD) ? result : 0);
#endif
#if defined(HAVE_DECL_RUSAGE_LWP) && HAVE_DECL_RUSAGE_LWP
    if (mThreadTimeUseGetrusageLwp)
        return (GetrusageMillisec(result, RUSAGE_LWP) ? result : 0);
#endif
#if defined(HAVE_DECL_CLOCK_THREAD_CPUTIME_ID) && HAVE_DECL_CLOCK_THREAD_CPUTIME_ID
    if (mThreadTimeUseClockGettimeThread)
        return (ClockGettimeMillisec(result, CLOCK_THREAD_CPUTIME_ID) ? result : 0);
#endif
    if (mThreadTimeUseFallback)
        return GetProcessTime ();
    return 0;
}
Example No. 19
void SQRWndOffSet::OnProcess()
{
	if ( 0 != mBeginTimer )
	{
		DWORD time = static_cast<DWORD>(GetProcessTime());
		if ( time - mBeginTimer > mSteptimer)
		{
			mBeginTimer =  time;
			mStepResult += mStep;

			//the scrolling text has reached the end
			if ( mStepResult > mStepOver )
			{
				//not implemented for now, because setting the string would require some code changes
				//mBeginTimer = 0;
				//ShowWnd(false);
				SetWndText("");
			}
		}
	}
}
Example No. 20
void CScriptThread::Run()
{
	bool bEnding = false;

	volatile uint64 uTime= GetProcessTime();

	uint64 uGCTime = uTime;
	
	const uint32 uInterval=min<uint32>( 500,CAppConfigServer::Inst()->GetChildVMGCStepInterval() );
	const uint32 uStep=CAppConfigServer::Inst()->GetChildVMGCStep();
	
	WatchThread( m_hThread , GetThreadName(), &uTime ,30*1000, 2,5000 );

	SQR_TRY
	{
		LowerCurThreadPriority();

		for(;;)
		{
			uTime=GetProcessTime();

			while( uTime >= uGCTime )
			{
				uGCTime+=uInterval;
				GetScript()->GCStep(uStep);
			}

			switch( GetSemaphore( &m_smThread,uInterval  ) )
			{
			case 1:
				continue;
			case 0:
				break;
			default:
				{
					ostringstream strm;
					strm<<"GetSemaphore failed with error \""<<strerror(errno)<<"\"";
					GenErr(strm.str());
				}
			}

			if( Atomic_Swap( &m_nHasDbResult, 0 ) )
				m_pScriptDbHandler->OnSomeQueryJobDone();

			if( Atomic_Swap( &m_nHasLeftMsg, 0 ) )
				HandleAllLeftMsg();

			FlushRightMsg();			
			
			if(m_bQuit)
			{
				break;
			}
			else
			{
				if(!bEnding)
				{
					if(m_pThreadMgr->m_bQuitSignal)
					{
						bEnding = true;
						m_pHandler->OnEndThread();
					}
				}					
			}
		}

		GetScript()->GCAll();
	}
	SQR_CATCH(exp)
	{
		LogExp(exp);
	}
	SQR_TRY_END;

	DoNotWatchThread( m_hThread );
}
Example No. 21
	void CMapEditEffectObj::Render()
	{
		DWORD dwCurrentTime = (uint32)GetProcessTime();
		DWORD pretime = ::timeGetTime();

		static DWORD dwCurrentTimeForBestMinimap = dwCurrentTime;

		dwCurrentTimeForBestMinimap = dwCurrentTime;

		CTerrainMesh *pTerrain  = CMapEditApp::GetInst()->GetEditingMesh();
		COperator	 *pOperator = CMapEditApp::GetInst()->GetOperator();

		if ( EES_MOVING == sModelCommonPro.eEditState )
		{
			int nGridIndexBlockCenter;

			if ( sModelCommonPro.bCameraOriented )
			{
				CVector3f v;

				if ( !CPicker::GetInst()->GetIntersected(v,&nGridIndexBlockCenter) )
				{
					nGridIndexBlockCenter = pOperator->GetGridIndexHot();
				}
			}
			else
			{
				nGridIndexBlockCenter = pOperator->GetGridIndexHot();
			}

			nGridIndexBlockCenter += ( sModelCommonPro.nXGroupOffset + sModelCommonPro.nZGroupOffset*pTerrain->GetWidth() );

			if ( !pTerrain->IsValid(nGridIndexBlockCenter) )
			{
				return;
			}

			SGrid &gridBlockCenter = pTerrain->GetGrid(nGridIndexBlockCenter);

			sModelCommonPro.vBlockCenter = gridBlockCenter.vCenter;

			int nImageCenterGridIndex = this->GetUserOffsetAdjustedGridIndex(gridBlockCenter.dwGridIndex);
			SGrid &gridImageCenter = pTerrain->GetGrid(nImageCenterGridIndex);
			sModelCommonPro.vImageCenter = gridImageCenter.vCenter;

			sModelCommonPro.matWorldOff.SetTranslate(sModelCommonPro.vBlockCenter.x, sModelCommonPro.vImageCenter.y, sModelCommonPro.vBlockCenter.z);

			sModelCommonPro.mat = sModelCommonPro.matSelfOff*sModelCommonPro.matUserOffset*sModelCommonPro.matWorldOff;

			if( m_pWireBox )
				m_pWireBox->SetAbsolute(sModelCommonPro.WireBoxMat);
		}

		CColor color;
		color.A = 255;

		///scene lights are rendered in yellow
		if( sModelCommonPro.bEffectLight )
		{
			color.R = 255;
			color.G = 255;
			color.B = 0;
		}
		else
		{
			color.R = 125;
			color.G = 125;
			color.B = 125;
		}

		CEditModel::SetAmbient(color);
		CEditModel::SetDiffuse(color);

		CEditModel::SetRenderStyle(ROS_LIGHTMAT1);

		
		//CEditModel::SetVisible(CMainWindowTarget::GetInst()->GetGroup()->GetCamera() != 0);

		SetPosition(sModelCommonPro.mat.GetLoc());
		if( !CSceneEnvMgr::GetInst()->GetIsGenerateMinMap() )
			CEditModel::OnPreRender(dwCurrentTime,CMainWindowTarget::GetInst()->GetGroup()->GetCamera());
		else
			CEditModel::OnPreRender(dwCurrentTime,NULL);
		//CEditModel::SetFixMatrix(sModelCommonPro.mat);
		
	
		CEditModel::Render(/*dwCurrentTime, &rs*/);

		if ( !CCameraManager::GetInst()->GetMapEditCamera()->GetIsOrthogonal() &&
			this->GetLocked() || this->GetSelected() && (!sModelCommonPro.bEffectBase) )
		{
			if( m_pWireBox )
			{
				m_pWireBox->SetAbsolute(sModelCommonPro.WireBoxMat);
				CTerrainAdhereRenderer::GetInst()->AddRenderWireBox(m_pWireBox);
			}
			//RenderBigBoxDebug(this);
		}

		DWORD postime = ::timeGetTime();
		DWORD diff	  = postime - pretime;

		//DebugOut("%lx, dif %u, pre %u, pos %u, %s\n", this, diff, pretime, postime, m_strSceneCueName.c_str());
	}
Example No. 22
POV_LONG Timer::ElapsedProcessCPUTime () const
{
    return GetProcessTime () - mProcessTimeStart;
}
Example No. 23
// refresh the data
int TGManager::TGDataRefresh()
{
	if ( m_connectionId == -1 )
	{
		if ( !StartThinkgear() )
		{
			m_beSuc = false;
			return -2;
		}
	}

	float TG_value = -1.0f;
	int errCode = 0;

	// read all packets from the current connection
	while (true)
	{
		errCode = TG_ReadPackets( m_connectionId, 1 );
		if( errCode >= 1 ) 
		{
			m_LastNoDataTime = 0;
			TGDataToRefResh();
		}
		else if ( errCode == -1 || errCode == -3 )		// the connection id is invalid
		{
			m_LastNoDataTime = 0;
			TotalEndThinkgear();
			if ( !StartThinkgear(m_baud_rate, m_stream_format ) )
			{
				m_beSuc = false;
				return -2;		// the connection cannot be restored; the device has failed
			}
			else
			{
				// read all packets from the current connection
				errCode = TG_ReadPackets( m_connectionId, 1 );
				if( errCode >= 1 ) 
				{
					TGDataToRefResh();
				}
				// end of reread
			}
		}
		else if ( errCode == -2 )	// the device has disconnected or is not worn correctly
		{
			if (m_LastNoDataTime == 0)
			{
				m_LastNoDataTime = GetProcessTime();
			}
			else
			{
				uint64 uCurTime = GetProcessTime();
				if (uCurTime - m_LastNoDataTime > 1600)
				{
					// clear the data to zero
					m_bufData.InitData();
					m_LastNoDataTime = 0;
				}
			}
		}
	}
	
	return 0;
}
Example No. 24
static AUO_RESULT x264_out(CONF_GUIEX *conf, const OUTPUT_INFO *oip, PRM_ENC *pe, const SYSTEM_DATA *sys_dat) {
    AUO_RESULT ret = AUO_RESULT_SUCCESS;
    PIPE_SET pipes = { 0 };
    PROCESS_INFORMATION pi_enc = { 0 };

    char x264cmd[MAX_CMD_LEN]  = { 0 };
    char x264args[MAX_CMD_LEN] = { 0 };
    char x264dir[MAX_PATH_LEN] = { 0 };
    char *x264fullpath = (conf->x264.use_highbit_depth) ? sys_dat->exstg->s_x264.fullpath_highbit : sys_dat->exstg->s_x264.fullpath;
    
    const BOOL afs = conf->vid.afs != 0;
    CONVERT_CF_DATA pixel_data = { 0 };
    video_output_thread_t thread_data = { 0 };
    thread_data.repeat = pe->delay_cut_additional_vframe;
    set_pixel_data(&pixel_data, conf, oip->w, oip->h);
    
    int *jitter = NULL;
    int rp_ret = 0;

    //initialize x264 priority settings
    DWORD set_priority = (pe->h_p_aviutl || conf->vid.priority != AVIUTLSYNC_PRIORITY_CLASS) ? priority_table[conf->vid.priority].value : NORMAL_PRIORITY_CLASS;

    //prepare the process information
    if (!PathFileExists(x264fullpath)) {
        ret |= AUO_RESULT_ERROR; error_no_exe_file("x264", x264fullpath);
        return ret;
    }
    PathGetDirectory(x264dir, _countof(x264dir), x264fullpath);

    //conversion function for YUY2/YC48 -> NV12/YUV444 and RGB copy
    const int input_csp_idx = get_aviutl_color_format(conf->x264.use_highbit_depth, conf->x264.output_csp, conf->vid.input_as_lw48);
    const func_convert_frame convert_frame = get_convert_func(oip->w, input_csp_idx, (conf->x264.use_highbit_depth) ? 16 : 8, conf->x264.interlaced, conf->x264.output_csp);
    if (convert_frame == NULL) {
        ret |= AUO_RESULT_ERROR; error_select_convert_func(oip->w, oip->h, conf->x264.use_highbit_depth, conf->x264.interlaced, conf->x264.output_csp);
        return ret;
    }
    //allocate memory for the video buffer
    if (!malloc_pixel_data(&pixel_data, oip->w, oip->h, conf->x264.output_csp, (conf->x264.use_highbit_depth) ? 16 : 8)) {
        ret |= AUO_RESULT_ERROR; error_malloc_pixel_data();
        return ret;
    }

    //set up the pipes
    pipes.stdIn.mode = AUO_PIPE_ENABLE;
    pipes.stdErr.mode = AUO_PIPE_ENABLE;
    pipes.stdIn.bufferSize = pixel_data.total_size * 2;

    //display and check the x264 version info
    if (AUO_RESULT_ERROR == write_log_x264_version(x264fullpath)) {
        return (ret | AUO_RESULT_ERROR);
    }

    //build the command line
    build_full_cmd(x264cmd, _countof(x264cmd), conf, oip, pe, sys_dat, PIPE_FN);
    write_log_auo_line(LOG_INFO, "x264 options...");
    write_args(x264cmd);
    sprintf_s(x264args, _countof(x264args), "\"%s\" %s", x264fullpath, x264cmd);
    remove(pe->temp_filename); //avoid picking up the old file during the file-size check
    
    if (conf->vid.afs && conf->x264.interlaced) {
        ret |= AUO_RESULT_ERROR; error_afs_interlace_stg();
    //allocate the jitter buffer
    } else if ((jitter = (int *)calloc(oip->n + 1, sizeof(int))) == NULL) {
        ret |= AUO_RESULT_ERROR; error_malloc_tc();
    //frame reading from Aviutl (afs)
    } else if (!setup_afsvideo(oip, sys_dat, conf, pe)) {
        ret |= AUO_RESULT_ERROR; //failed to read frames from Aviutl (afs)
    //start the x264 process
    } else if ((rp_ret = RunProcess(x264args, x264dir, &pi_enc, &pipes, (set_priority == AVIUTLSYNC_PRIORITY_CLASS) ? GetPriorityClass(pe->h_p_aviutl) : set_priority, TRUE, FALSE)) != RP_SUCCESS) {
        ret |= AUO_RESULT_ERROR; error_run_process("x264", rp_ret);
    //start the writer thread
    } else if (video_output_create_thread(&thread_data, &pixel_data, pipes.f_stdin)) {
        ret |= AUO_RESULT_ERROR; error_video_output_thread_start();
    } else {
        //everything is fine
        int i = 0;
        void *frame = NULL;
        int *next_jitter = NULL;
        UINT64 amp_filesize_limit = (UINT64)(1.02 * get_amp_filesize_limit(conf, oip, pe, sys_dat));
        BOOL enc_pause = FALSE, copy_frame = FALSE, drop = FALSE;
        const DWORD aviutl_color_fmt = COLORFORMATS[get_aviutl_color_format(conf->x264.use_highbit_depth, conf->x264.output_csp, conf->vid.input_as_lw48)].FOURCC;

        //get Aviutl's process time
        PROCESS_TIME time_aviutl;
        GetProcessTime(pe->h_p_aviutl, &time_aviutl);

        //wait here until x264 becomes idle
        while (WaitForInputIdle(pi_enc.hProcess, LOG_UPDATE_INTERVAL) == WAIT_TIMEOUT)
            log_process_events();

        //enable control from the log window
        DWORD tm_vid_enc_start = timeGetTime();
        enable_x264_control(&set_priority, &enc_pause, afs, afs && pe->current_x264_pass == 1, tm_vid_enc_start, oip->n);

        //------------ main loop ------------
        for (i = 0, next_jitter = jitter + 1, pe->drop_count = 0; i < oip->n; i++, next_jitter++) {
            //check for abort
            ret |= (oip->func_is_abort()) ? AUO_RESULT_ABORT : AUO_RESULT_SUCCESS;

            //while x264 is running, fetch its messages and show them in the log window
            if (ReadLogEnc(&pipes, pe->drop_count, i) < 0) {
                //x264 died on its own...
                ret |= AUO_RESULT_ERROR; error_x264_dead();
                break;
            }

            if (!(i & 7)) {
                //update Aviutl's progress display
                oip->func_rest_time_disp(i + oip->n * (pe->current_x264_pass - 1), oip->n * pe->total_x264_pass);

                //x264 priority
                check_enc_priority(pe->h_p_aviutl, pi_enc.hProcess, set_priority);

                //parallel audio processing
                ret |= aud_parallel_task(oip, pe);
                
                //check that the file-size limit has not been exceeded
                if (!(i & 63)
                    && amp_filesize_limit //a size limit is configured
                    && !(1 == pe->current_x264_pass && 1 < pe->total_x264_pass)) { //not the first pass of a multi-pass encode
                    UINT64 current_filesize = 0;
                    if (GetFileSizeUInt64(pe->temp_filename, &current_filesize) && current_filesize > amp_filesize_limit) {
                        warning_amp_filesize_over_limit();
                        pe->muxer_to_be_used = MUXER_DISABLED; //skip muxing
                        break;
                    }
                }
            }

            //pause
            while (enc_pause & !ret) {
                Sleep(LOG_UPDATE_INTERVAL);
                ret |= (oip->func_is_abort()) ? AUO_RESULT_ABORT : AUO_RESULT_SUCCESS;
                log_process_events();
            }

            //wait until the write to stdin has completed
            while (WAIT_TIMEOUT == WaitForSingleObject(thread_data.he_out_fin, LOG_UPDATE_INTERVAL)) {
                ret |= (oip->func_is_abort()) ? AUO_RESULT_ABORT : AUO_RESULT_SUCCESS;
                log_process_events();
            }

            //check for abort, errors, etc.
            if (AUO_RESULT_SUCCESS != ret)
                break;

            //handle the copy-frame flag
            copy_frame = (!!i & (oip->func_get_flag(i) & OUTPUT_INFO_FRAME_FLAG_COPYFRAME));

            //get a frame from Aviutl (afs)
            if (NULL == (frame = ((afs) ? afs_get_video((OUTPUT_INFO *)oip, i, &drop, next_jitter) : oip->func_get_video_ex(i, aviutl_color_fmt)))) {
                ret |= AUO_RESULT_ERROR; error_afs_get_frame();
                break;
            }

            drop |= (afs & copy_frame);

            if (!drop) {
                //for a copy frame, do not update the video buffer; push it to the pipe as-is
                if (!copy_frame)
                    convert_frame(frame, &pixel_data, oip->w, oip->h);  /// YUY2/YC48 -> NV12/YUV444 conversion, RGB copy
                //start writing to stdin
                SetEvent(thread_data.he_out_start);
            } else {
                *(next_jitter - 1) = DROP_FRAME_FLAG;
                pe->drop_count++;
                //allow conversion of the next frame
                SetEvent(thread_data.he_out_fin);
            }

            // when "Display -> show preview while saving" is checked, calling func_update_preview()
            // overwrites the buffer obtained from func_get_video_ex(),
            // so the call was moved here (taken from 拡張AVI出力 plus)
            oip->func_update_preview();
        }
        //------------ end of main loop --------------

        //terminate the writer thread
        video_output_close_thread(&thread_data, ret);

        //disable x264 control from the log window
        disable_x264_control();

        //close the pipes
        CloseStdIn(&pipes);

        if (!ret) oip->func_rest_time_disp(oip->n * pe->current_x264_pass, oip->n * pe->total_x264_pass);

        //finish the parallel audio processing
        ret |= finish_aud_parallel_task(oip, pe, ret);
        //parallel audio processing is done
        release_audio_parallel_events(pe);

        //timecode output
        if (!ret && (afs || conf->vid.auo_tcfile_out))
            tcfile_out(jitter, oip->n, (double)oip->rate / (double)oip->scale, afs, pe);

        //wait for the encoder to finish
        while (WaitForSingleObject(pi_enc.hProcess, LOG_UPDATE_INTERVAL) == WAIT_TIMEOUT)
            ReadLogEnc(&pipes, pe->drop_count, i);

        DWORD tm_vid_enc_fin = timeGetTime();

        //fetch any remaining messages
        while (ReadLogEnc(&pipes, pe->drop_count, i) > 0);

        if (!(ret & AUO_RESULT_ERROR) && afs)
            write_log_auo_line_fmt(LOG_INFO, "drop %d / %d frames", pe->drop_count, i);
        
        write_log_auo_line_fmt(LOG_INFO, "CPU usage: Aviutl: %.2f%% / x264: %.2f%%", GetProcessAvgCPUUsage(pe->h_p_aviutl, &time_aviutl), GetProcessAvgCPUUsage(pi_enc.hProcess));
        write_log_auo_enc_time("x264 encode time", tm_vid_enc_fin - tm_vid_enc_start);
    }

    //cleanup
    if (pipes.stdErr.mode)
        CloseHandle(pipes.stdErr.h_read);
    CloseHandle(pi_enc.hProcess);
    CloseHandle(pi_enc.hThread);

    free_pixel_data(&pixel_data);
    if (jitter) free(jitter);

    ret |= exit_audio_parallel_control(oip, pe, ret);

    return ret;
}
Example No. 25
void SQRButton::SetFlashTime( uint32 uFlashCircle, int32 nFlashTime )
{
	m_uFlashCircle    = uFlashCircle;
	m_uFlashStartTime = uint32(GetProcessTime());
	m_uFlashEndTime   = m_uFlashStartTime + (uint32)nFlashTime;
}
Example No. 26
void Timer::Reset ()
{
    mWallTimeStart    = GetWallTime ();
    mProcessTimeStart = GetProcessTime ();
    mThreadTimeStart  = GetThreadTime ();
}
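
Examples No. 7, No. 10, No. 22, and No. 26 are all pieces of the same POV-Ray Timer class. A hedged usage sketch of how they fit together; the surrounding work is hypothetical, and the millisecond unit is inferred from the *Millisec helpers in Example No. 18:

Timer timer;
timer.Reset();                                     // capture wall/process/thread start times
RenderSomeRows();                                  // hypothetical work being measured
POV_LONG cpuMs = timer.ElapsedProcessCPUTime();    // process CPU time since Reset()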
Example No. 27
	int TraverseTable(lua_State* pState)
	{
		if(!lua_istable(pState, 1))
			return 0;

		string name;
		if (lua_isstring(pState, 2))
		{
			name = lua_tostring(pState, 2);
		}
		else
		{
			char buf[50];
			static int32 nNum = 0;
			sprintf(buf, "Trav%u_%u.log", Atomic_FetchAndAdd(&nNum, 1), GetCurPID());
			name = buf;
		}
		const char* mode = (lua_isstring(pState, 3)) ? (lua_tostring(pState, 3)) : "w";
		
		const int ls_nMaxTravelLevel = 10;

		int nMaxLevel = (lua_isnumber(pState, 4)) ? (int)lua_tonumber(pState, 4) : ls_nMaxTravelLevel;

		set<const void*> tbl;
		int level = -1;

		uint64 uBegin = GetProcessTime();

		//the thread watch feature must be disabled during the traversal, because traversal can be very slow
		HTHREAD hThread;
		GetCurrentThread(&hThread);
		EnableWatchThread(hThread, false);

		if (lua_istable(pState, 5))
		{
			//filter object table
			lua_checkstack(pState, 3);

			lua_pushnil(pState);

			while (lua_next(pState, -2))
			{
				const void* p = lua_topointer(pState, -1);
				tbl.insert(p);
				lua_pop(pState, 1);
			}
		}

		lua_settop(pState, 1);

		CLog* pLog = CLog::CreateLog(name.c_str(), mode);
		pLog->EnableTime(false);

		uint32 uDataNum = 0;
		_TraverseTable(pState, tbl, pLog, level, nMaxLevel, uDataNum);
		EnableWatchThread(hThread, true);

		uint64 uEnd = GetProcessTime();
		
		ostringstream strm;
		strm << endl;
		strm << "Traversal took: " << uEnd - uBegin << " ms" << endl;

		pLog->Write(strm.str().c_str());

		pLog->Release();

		return 0;
	}