void lagcompensation::create_move() {
	last_eye_positions.push_front(g_ctx.m_local->get_eye_pos());
	if (last_eye_positions.size() > 128)
		last_eye_positions.pop_back();

	auto nci = g_csgo.m_engine()->GetNetChannelInfo();
	if (!nci)
		return;

	const int latency_ticks = TIME_TO_TICKS(nci->GetLatency(FLOW_OUTGOING));
	const auto latency_based_eye_pos = last_eye_positions.size() <= latency_ticks ? last_eye_positions.back() : last_eye_positions[latency_ticks];

	for (int i = 1; i <= g_csgo.m_globals()->m_maxclients; i++) {
		auto e = static_cast<player_t *>(g_csgo.m_entitylist()->GetClientEntity(i));
		auto & player = players[i];

		player.m_e = e;

		if (!e) { player.m_e = nullptr; continue; }

		if (!e->valid(true))
			continue;

		player.m_resolver->m_e = e;
		player.m_resolver->create_move(latency_based_eye_pos);
	}
}
std::deque< tickrecord_t > player_record_t::get_valid_track() {
	// A record is usable for backtracking when its simulation time is within
	// the server's lag-compensation window: round-trip latency plus the
	// interpolation amount, capped at 1 second, with 0.2s of slack.
	const auto within_backtrack_window = [&](float sim_time) -> bool {
		auto net_channel = g_csgo.m_engine()->GetNetChannelInfo();

		// No net channel -> delta is effectively infinite (original returned
		// FLT_MAX, which fails the < 0.2f check below).
		if (!net_channel)
			return false;

		float correct = 0.f;
		correct += net_channel->GetLatency(FLOW_OUTGOING);
		correct += net_channel->GetLatency(FLOW_INCOMING);
		correct += util::lerp_time();
		correct = math::clamp< float >(correct, 0.f, 1.f);

		return fabs(correct - (g_csgo.m_globals()->m_curtime - sim_time)) < 0.2f;
	};

	std::deque< tickrecord_t > valid;

	for (const auto& rec : m_track)
		if (within_backtrack_window(rec.m_simulation_time))
			valid.push_back(rec);

	return valid;
}
Exemple #3
0
//	Answers a property query for this stream.  ioDataSize must exactly match
//	GetPropertyDataSize() for the selector or CAException(kAudioHardwareBadPropertySizeError)
//	is thrown.  CFStringRef results are returned with a +1 retain (Copy semantics);
//	the caller is responsible for releasing them.  Unknown selectors are
//	delegated to HP_Object::GetPropertyData.
void	HP_Stream::GetPropertyData(const AudioObjectPropertyAddress& inAddress, UInt32 inQualifierDataSize, const void* inQualifierData, UInt32& ioDataSize, void* outData) const
{
	//	take and hold the state mutex
	CAMutex::Locker theStateMutex(const_cast<HP_Device*>(mOwningDevice)->GetStateMutex());
	
	switch(inAddress.mSelector)
	{
		case kAudioObjectPropertyName:
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioObjectPropertyName");
			*static_cast<CFStringRef*>(outData) = CopyStreamName();
			break;
			
		case kAudioObjectPropertyManufacturer:
			//	fixed copy/paste bug: the error text previously named kAudioObjectPropertyName
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioObjectPropertyManufacturer");
			*static_cast<CFStringRef*>(outData) = CopyStreamManufacturerName();
			break;
			
		case kAudioObjectPropertyElementName:
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioObjectPropertyElementName");
			*static_cast<CFStringRef*>(outData) = CopyElementFullName(inAddress);
			break;
			
		case kAudioObjectPropertyElementCategoryName:
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioObjectPropertyElementCategoryName");
			*static_cast<CFStringRef*>(outData) = CopyElementCategoryName(inAddress);
			break;
			
		case kAudioObjectPropertyElementNumberName:
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioObjectPropertyElementNumberName");
			*static_cast<CFStringRef*>(outData) = CopyElementNumberName(inAddress);
			break;
			
		case kAudioStreamPropertyDirection:
			//	1 = input stream, 0 = output stream
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioStreamPropertyDirection");
			*static_cast<UInt32*>(outData) = IsInput() ? 1 : 0;
			break;
			
		case kAudioStreamPropertyTerminalType:
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioStreamPropertyTerminalType");
			*static_cast<UInt32*>(outData) = GetTerminalType();
			break;
			
		case kAudioStreamPropertyStartingChannel:
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioStreamPropertyStartingChannel");
			*static_cast<UInt32*>(outData) = GetStartingDeviceChannelNumber();
			break;
			
		case kAudioStreamPropertyLatency:
			ThrowIf(ioDataSize != GetPropertyDataSize(inAddress, inQualifierDataSize, inQualifierData), CAException(kAudioHardwareBadPropertySizeError), "HP_Stream::GetPropertyData: wrong data size for kAudioStreamPropertyLatency");
			*static_cast<UInt32*>(outData) = GetLatency();
			break;
			
		default:
			HP_Object::GetPropertyData(inAddress, inQualifierDataSize, inQualifierData, ioDataSize, outData);
			break;
	}	//	note: removed stray ';' that followed this brace
}
void WorldSession::HandleReportLag(WorldPacket& recvData)
{
    // Client-submitted lag report: we simply archive it in the character
    // database — there is nothing else actionable to do with it.
    uint32 lagType, mapId;
    float x, y, z;
    recvData >> lagType >> mapId >> x >> y >> z;

    PreparedStatement* report = CharacterDatabase.GetPreparedStatement(CHAR_INS_LAG_REPORT);
    report->setUInt32(0, GUID_LOPART(GetPlayer()->GetGUID()));
    report->setUInt8 (1, lagType);
    report->setUInt16(2, mapId);
    report->setFloat (3, x);
    report->setFloat (4, y);
    report->setFloat (5, z);
    report->setUInt32(6, GetLatency());
    report->setUInt32(7, time(NULL));
    CharacterDatabase.Execute(report);
}
Exemple #5
0
// Loads an impulse response by splitting it across a chain of sub-engines
// with progressively larger FFT sizes, so convolution can run with zero (or
// caller-bounded) latency.  Returns GetLatency() of the resulting chain.
//
//   impulse         - impulse buffer to load
//   maxfft_size     - FFT size cap (sign ignored; doubled, clamped to 32768)
//   known_blocksize - host processing block size if fixed (0 if unknown)
//   max_imp_size    - if > 0, truncate the impulse to this many samples
//   impulse_offset  - sample offset into the impulse to start from
//   latency_allowed - samples of latency tolerated (0 => zero-latency mode)
int WDL_ConvolutionEngine_Div::SetImpulse(WDL_ImpulseBuffer *impulse, int maxfft_size, int known_blocksize, int max_imp_size, int impulse_offset, int latency_allowed)
{
  m_need_feedsilence=true;

  // drop previously built sub-engines (Empty(true) deletes the owned objects)
  m_engines.Empty(true);
  if (maxfft_size<0)maxfft_size=-maxfft_size;
  maxfft_size*=2;
  if (!maxfft_size || maxfft_size>32768) maxfft_size=32768;


  const int MAX_SIZE_FOR_BRUTE=64;

  // start small; the very first segment may be brute-force (direct) convolution
  int fftsize = MAX_SIZE_FOR_BRUTE;
  int impulsechunksize = MAX_SIZE_FOR_BRUTE;

  // if the host block size is a known power of two, size the first FFT to it
  if (known_blocksize && !(known_blocksize&(known_blocksize-1)) && known_blocksize>MAX_SIZE_FOR_BRUTE*2)
  {
    fftsize=known_blocksize/2;
    impulsechunksize=known_blocksize/2;
  }
  // if the caller tolerates some latency, skip ahead to a larger initial FFT
  if (latency_allowed*2 > fftsize)
  {
    int x = 16;
    while (x <= latency_allowed) x*=2;
    if (x>32768) x=32768;
    fftsize=impulsechunksize=x;
  }

  int offs=0;
  int samplesleft=impulse->impulses[0].GetSize()-impulse_offset;
  if (max_imp_size>0 && samplesleft>max_imp_size) samplesleft=max_imp_size;

  // consume the impulse segment by segment, doubling-ish sizes as we go
  do
  {
    WDL_ConvolutionEngine *eng=new WDL_ConvolutionEngine;

    // only the first segment of a zero-latency chain is brute-force
    bool wantBrute = !latency_allowed && !offs;
    if (impulsechunksize*(wantBrute ? 2 : 3) >= samplesleft) impulsechunksize=samplesleft; // early-out, no point going to a larger FFT (since if we did this, we wouldnt have enough samples for a complete next pass)
    if (fftsize>=maxfft_size) { impulsechunksize=samplesleft; fftsize=maxfft_size; } // if FFTs are as large as possible, finish up

    eng->SetImpulse(impulse,fftsize,offs+impulse_offset,impulsechunksize, wantBrute);
    eng->m_zl_delaypos = offs;
    eng->m_zl_dumpage=0;
    m_engines.Add(eng); // m_engines takes ownership of eng

#ifdef WDLCONVO_ZL_ACCOUNTING
    char buf[512];
    wsprintf(buf,"ce%d: offs=%d, len=%d, fftsize=%d\n",m_engines.GetSize(),offs,impulsechunksize,fftsize);
    OutputDebugString(buf);
#endif

    samplesleft -= impulsechunksize;
    offs+=impulsechunksize;

#if 1 // this seems about 10% faster (maybe due to better cache use from less sized ffts used?)
    impulsechunksize=offs*3;
    fftsize=offs*2;
#else
    impulsechunksize=fftsize;

    fftsize*=2;
#endif
  }
  while (samplesleft > 0);
  
  return GetLatency();
}
// Produces one video sample: waits for the scheduler, paces against stream
// time, captures the screen region into the sample buffer and timestamps it.
// Returns S_OK on a delivered frame, S_FALSE to end the stream (stop request
// or max record time reached), E_UNEXPECTED on a wait error.
//
// Cleanup in this revision: removed a ~220-line `#if 0` disabled block (an
// older clock-advise implementation), an unused `nSize` local, and an
// unreachable `return hr;` that followed `return S_OK;`.
HRESULT CScreenCaptureSourcePin::FillBuffer(IMediaSample *pSample)
{
	// Block until the filter signals continue / stop / error.
	FTL::FTLThreadWaitType waitType = _GetWaitType(INFINITE);
	switch (waitType)
	{
	case FTL::ftwtStop:
		return S_FALSE;	//quit
	case FTL::ftwtError:
		return E_UNEXPECTED;
	//case FTL::ftwtContinue:
	//case FTL::ftwtTimeOut:
	default:
		//just continue
		break;
	}

	CheckPointer(pSample, E_POINTER);

	HRESULT hr = E_FAIL;	// updated by the DX_VERIFY macro calls below

	// Current stream time (time since the graph started running).
	CRefTime rfStreamTime;
	{
		//CAutoLock cObjectLock(m_pLock);
		DX_VERIFY(m_pFilter->StreamTime(rfStreamTime));
	}
	REFERENCE_TIME rtStreamTime = rfStreamTime.GetUnits();
	if (m_rfMaxRecordTime != 0 && rtStreamTime > m_rfMaxRecordTime)
	{
		//max time over

		//if there is preview window, just return S_FALSE is OK
		//if there is NOT preview window, can not stop graph automatic
		m_pFilter->NotifyEvent(TIME_OVER, static_cast<LONG_PTR>(m_rfMaxRecordTime / (UNITS / MILLISECONDS)), 0);
		return S_FALSE;
	}
	REFERENCE_TIME rtStart = 0; 
	REFERENCE_TIME rtStop = 0;

	// Skip frames we are already too late to capture, advancing the frame
	// counter until the current frame slot contains the stream time.
	do 
	{
		rtStart = m_nFrameNumber * m_nAvgTimePerFrame;
		rtStop = rtStart + m_nAvgTimePerFrame;
		if( rtStreamTime > rtStop)
		{
			OutputDebugString(L"lost capture \r\n");
			++m_nFrameNumber;
		}
	} while (rtStreamTime > rtStop);

	// Busy-wait until stream time reaches this frame's start.
	while (rtStreamTime < rtStart)
	{
		m_pFilter->StreamTime(rfStreamTime);
		rtStreamTime = rfStreamTime.GetUnits();
		// REFERENCE_TIME rtWaitTime = rtStart - rtStreamTime;
		// ::WaitForSingleObject(m_hWaitEvent, rtWaitTime/10000);
	}

	BYTE *pData = NULL;
	long cbData = 0;
	CAutoLock cAutoLockShared(&m_cSharedState);
	DX_VERIFY(pSample->GetPointer(&pData));
	cbData = pSample->GetSize();

	ASSERT(m_mt.formattype == FORMAT_VideoInfo);
	VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;

	// Capture the screen region directly into the sample's buffer.
	HBITMAP hDib = m_pScreenCaptureImpl->CopyScreenToBitmap(&m_rcCapture, pData, (BITMAPINFO *) &(pVih->bmiHeader));    

	if (hDib)
	{
		if (m_bFirstFrame)
		{
			// Hand the first frame's bitmap to the host via FIRST_FRAME; the
			// event receiver takes ownership of (and must delete) the HBITMAP.
			m_bFirstFrame = FALSE;
			DX_VERIFY(m_pFilter->NotifyEvent(FIRST_FRAME, (LONG_PTR)(hDib), NULL));
		}
		else
		{
			DeleteObject(hDib);
		}
	}  

	DX_VERIFY(pSample->SetTime(&rtStart, &rtStop));
	m_nFrameNumber++;

	// Every captured frame is independently decodable — mark as a sync point.
	DX_VERIFY(pSample->SetSyncPoint(TRUE));

	return S_OK;
}
Exemple #7
0
// Per-frame multiplayer pump: allocates packet buffers for connected peers,
// services the network layer, handles a new connection, receives remote
// state/packet arrays, advances the remote player's simulation clock when
// data for the next tick is available, and disconnects on excessive latency.
//
//   inc - simulation time step (seconds) to advance remote time indices by.
void MULTIPLAY::Update(double inc)
{	
	if (MP_DBGDEEP)
		cout << "multiplay update" << endl;
	
	//create packet arrays if necessary
	// (lazily allocated per slot: local player + each connected remote)
	int i;
	for (i = 0; i < NumConnected() + 1; i++)
	{
		if (packetarrays[i] == NULL)
		{
			packetarrays[i] = new REPLAY_PACKET [PACKET_ARRAY_SIZE];
		}
	}
	
	if (MP_DBGDEEP)
		cout << "packet mem allocated" << endl;
	
	// Detect a connection that completed during net.Update() by comparing
	// the connected flag before and after.
	bool oldc = Connected();
	net.Update();
	if (!oldc && Connected())
	{
		//wasn't connected before, connected now.
		if (Server())
			remote_players++;
		
		ExchangeWorldInfo();
		
		// Reset all per-player timing/sync bookkeeping for the new session.
		int i;
		for (i = 0; i < MAX_PLAYERS; i++)
		{
			timeindex[i] = 0.0;
			loadstates[i].time = 0.0;
			loadstatenow[i] = false;
			numpackets[i] = 0;
			
			//packetarraytime[i] = 0.0;
			
			nooptime[i] = 0;
			noopvalid[i] = false;
			tickthisframe[i] = false;
			nooptick[i] = false;
		}
		
		dbgnumstates = 0;
		dbgnumpackets = 0;
		
		mq1.Clear();
		mq1.AddMessage("A client successfully connected");
	}
	
	if (MP_DBGDEEP)
		cout << "net updated" << endl;
	
	//read incoming data
	/*if (Connected() && net.NumBufferedPackets() > 0)
	{
		int i;
		for (i = 0; i < net.GetMaxBuffers(); i++)
		{
			if (net.GetBuffer(i)->Valid())
				ProcessPacket(net.GetBuffer(i));
		}
	}*/
	if (Connected())
	{
		if (!MP_DISABLEGET)
		{		
			ReceiveState();
		
			if (MP_DBGDEEP)
				cout << "state receive" << endl;
			
			ReceivePacketArray();
			
			if (MP_DBGDEEP)
				cout << "packet array receive" << endl;
		}
		
		if (!MP_DISABLEFUNCUPDATE)
		{
			// Decide whether remote slot 1 may tick this frame: we tick while
			// our local view of its time (timeindex[1]) trails the newest data
			// we hold for it — either a packet array or a no-op keepalive,
			// whichever carries the later timestamp.
			double tval = 0;
			string ticktype = "packet array";
			nooptick[1] = false;
			if (PacketArrayValid(1))
				tval = GetPacketArrayTime(1);
			if (noopvalid[1] && nooptime[1] > tval)
			{
				tval = nooptime[1];
				ticktype = "noop";
				nooptick[1] = true;
			}
			if ((noopvalid[1] || PacketArrayValid(1)) && timeindex[1] < tval + PACKET_ARRAY_FREQUENCY - FrameTime()/2.0)
			{
				tickthisframe[1] = true;
				
				/*int nextpacket = curpackets[1];
				if (nextpacket >= numpackets[1])
					nextpacket = numpackets[1] - 1;
				if (ticktype == "packet array")
					cout << "ticking " << ticktype << ": " << GetFuncMem(1)[GetPacketArray(1)[nextpacket].chardata[CHAR_FUNCNUM]].func_name << " " << curpackets[1] << "/" << numpackets[1] << " packets for " << tval << " at " << timeindex[1] << endl;
				else
					cout << "ticking " << ticktype << " for " << tval << " at " << timeindex[1] << endl;*/
				
				timeindex[1] += inc;
				
				//process packet data into the function memory (which is then given to DoOp by vamosworld)
				//if (PacketArrayValid(1))// && !(ticktype == "noop"))
				// Deactivate non-held functions before applying the new packet
				// data, so only freshly received inputs remain active.
				int i;
				for (i = 0; i < fnums[1]; i++)
				{
					if (!GetFuncMem(1)[i].held && GetFuncMem(1)[i].active)
						GetFuncMem(1)[i].active = false;
				}
				
				UpdateFuncmem(1);
			}
			else
			{
				tickthisframe[1] = false;
				//cout << "not ticking at " << timeindex[1] << endl;
			}
		}
		
		if (MP_DBGDEEP)
			cout << "ticked" << endl;
		
		//check to see if we need to increment		
		/*for (i = 0; i < NumConnected(); i++)
		{
			if (loadstatevalid[i+1] && timeindex[i+1] < loadstates[i+1].time + STATE_FREQUENCY + FrameTime()/2.0)
			{
				timeindex[i+1] += inc;
			}
		}*/
		//if (packetarrayvalid[1] && timeindex[1] < loadstates[1].time + STATE_FREQUENCY - FrameTime()/2.0)
	}

	//cout << "Latency: " << GetLatency(1) << " (" << timeindex[0] << "-" << timeindex[1] << ")" << endl;
	
	//disconnect if the latency is super high
	if (GetLatency(1) > CLIENT_DISCONNECT_TIMEOUT)
	{
		Disconnect();
	}
	
	//update statistics
	UpdateStats();
	
	if (MP_DBGDEEP)
			cout << "multiplay update done" << endl;
}