Example #1
bool RamAi::GameMonteCarloTree::PartialExpansion(const TreeNode &parent) const
{
	//From http://julian.togelius.com/Jacobsen2014Monte.pdf
	if (!parent.IsLeaf())
	{
		const double numberOfChildren = static_cast<double>(parent.GetNumberOfChildren());
		const double numberOfVisits = static_cast<double>(parent.GetScore().GetVisits());

		const double expansionWeighting = sqrt((2.0 * log(numberOfVisits)) / (1.0 + numberOfChildren));
		const double expansionWeightingWithBias = GetBias() * expansionWeighting;

		const double unexpandedNodesValue = AiSettings::GetData().partialExpansionBase;
		const double expansionUrgencyScore = unexpandedNodesValue + expansionWeightingWithBias;

		//Compare the expansion urgency score of the parent node with the UCB score of each of its children.
		//If the expansion urgency is greater than the UCB score of any of its children, then the node should be expanded (returns true).
		for (auto it = parent.GetIteratorBegin(); it != parent.GetIteratorEnd(); ++it)
		{
			const double uctScore = CalculateUcbScore(parent, it->second);

			if (expansionUrgencyScore > uctScore)
			{
				return true;
			}
		}

		return false;
	}
	else
	{
		return true;
	}
}
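For context, this is roughly how such a partial-expansion test plugs into an MCTS selection step. The sketch below is self-contained and only illustrative: NodeStats, UcbScore and ShouldExpand are hypothetical stand-ins for TreeNode, CalculateUcbScore and PartialExpansion, and the UCB1 formula shown is an assumption, not necessarily the one RamAi uses.

#include <cmath>
#include <vector>

// Hypothetical stand-in for the per-node bookkeeping in the example above.
struct NodeStats
{
	double totalScore = 0.0;
	double visits = 0.0;
};

// Standard UCB1 score of a child during MCTS selection (assumed formula).
double UcbScore(const NodeStats &parent, const NodeStats &child, double bias)
{
	const double exploitation = child.totalScore / child.visits;
	const double exploration = bias * std::sqrt((2.0 * std::log(parent.visits)) / child.visits);
	return exploitation + exploration;
}

// Partial expansion (Jacobsen et al. 2014): add a new child only if the urgency
// of the unexpanded children beats the UCB score of at least one existing child.
bool ShouldExpand(const NodeStats &parent, const std::vector<NodeStats> &children,
                  double bias, double partialExpansionBase)
{
	if (children.empty())
	{
		return true;
	}

	const double urgency = partialExpansionBase +
		bias * std::sqrt((2.0 * std::log(parent.visits)) / (1.0 + static_cast<double>(children.size())));

	for (const NodeStats &child : children)
	{
		if (urgency > UcbScore(parent, child, bias))
		{
			return true;
		}
	}

	return false;
}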
Example #2
__int64 SUBTITLESFROMMATROSKA::GetNextTimecode()
{
	__int64 iNTC = info.m->GetNextTimecode(info.iStream);

	// rescale from the Matroska file's timecode scale to this stream's scale
	if (iNTC != TIMECODE_UNKNOWN) {
		iNTC = iNTC * info.m->GetTimecodeScale() / GetTimecodeScale();
	}

	// apply the bias offset only if the timecode is known
	return (iNTC != TIMECODE_UNKNOWN) ? iNTC + GetBias() : iNTC;
}
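The rescale-then-bias conversion above recurs throughout these examples: a timecode is first converted from the source's timecode scale to the consumer's, and only then is the bias offset added. A minimal standalone sketch of that conversion, with illustrative names rather than the library's actual ones:

#include <cstdint>

// Illustrative sentinel for an unknown timecode (the library uses TIMECODE_UNKNOWN).
const std::int64_t kTimecodeUnknown = -1;

// Convert a timecode from the source's scale to the target's scale and apply
// the target's bias offset; unknown timecodes are passed through unchanged.
std::int64_t RescaleAndBias(std::int64_t timecode, std::int64_t sourceScale,
                            std::int64_t targetScale, std::int64_t bias)
{
	if (timecode == kTimecodeUnknown)
		return timecode;

	return timecode * sourceScale / targetScale + bias;
}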
Example #3
// recalculation of the delta value for the reference clock
INT64 SynchCorrection::GetCorrectedTimeDelta(INT64 time, REFERENCE_TIME rtAHwTime, REFERENCE_TIME rtRCTime)
{
  double deltaTime = 0;
  {
    CAutoLock lock(&m_csDeltaLock);

    // scale the delta by the current adjustment and bias, adding back the
    // fractional remainder left over from the previous call
    deltaTime = time * GetAdjustment() * GetBias() + m_dDeltaError;

    // carry the fractional part forward so repeated truncation to INT64
    // does not accumulate drift
    m_dDeltaError = deltaTime - floor(deltaTime);
  }
  return (INT64) deltaTime;
}
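The detail worth noting above is the error carry: the fractional remainder of the scaled delta is stored in m_dDeltaError and added back on the next call, so truncating to INT64 never loses time over many calls. A tiny self-contained sketch of that pattern, with an illustrative class rather than the library's API:

#include <cmath>
#include <cstdint>

// Illustrative accumulator: scales integer deltas by a floating-point factor
// while carrying the fractional remainder forward, so truncation cannot drift.
class ScaledDeltaAccumulator
{
public:
  explicit ScaledDeltaAccumulator(double factor) : m_factor(factor), m_error(0.0) {}

  std::int64_t Next(std::int64_t delta)
  {
    const double scaled = delta * m_factor + m_error;
    m_error = scaled - floor(scaled);   // keep the fractional part for the next call
    return (std::int64_t) scaled;
  }

private:
  double m_factor;
  double m_error;
};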
Example #4
INT64 SynchCorrection::CalculateDrift(REFERENCE_TIME rtAHwTime, REFERENCE_TIME rtRCTime)
{
  // re-base the hardware clock time to the start of playback
  rtAHwTime -= m_rtAHwStart;

  if (m_pSettings->GetLogDebug())
    Log ("SynchCorrection::CalculateDrift Size: %4u rtAHwTime: %10.8f rtRCTime: %10.8f m_rtStart: %10.8f",
      m_qSampleTimes.size(),rtAHwTime / 10000000.0, rtRCTime / 10000000.0, m_rtStart / 10000000.0);

  if (rtRCTime < 0)
    return 0;

  double bias = GetBias();
  if (!m_rtAHwStartSet)
  {
    // first call with a valid reference time: latch the hardware start time
    // so later hardware times are measured relative to it
    m_rtAHwStart = rtAHwTime - rtRCTime / bias;
    m_rtAHwStartSet = true;
    rtAHwTime -= m_rtAHwStart;
  }

  // drift = reference time predicted from the audio samples minus the actual
  // reference clock, plus the difference between the raw and bias-adjusted queue durations
  REFERENCE_TIME preCalculatedTime = GetReferenceTimeFromAudioSamples(rtAHwTime);
  return preCalculatedTime - rtRCTime + (m_rtQueueDuration - m_rtQueueAdjustedDuration * bias);
}
Example #5
int AUDIOSOURCELIST::Read(void* lpDest, DWORD dwMicrosecDesired, DWORD* lpdwMicrosecRead,
						  __int64* lpqwNanosecRead, __int64* lpiTimecode, ADVANCEDREAD_INFO* lpAARI)
{
	__int64 j;
	if (!lpqwNanosecRead) lpqwNanosecRead = &j;

	// can we read something from the current source?
	if (!info.active_source->IsEndOfStream()) {
		int res = info.active_source->Read(lpDest,dwMicrosecDesired,
			lpdwMicrosecRead,lpqwNanosecRead,lpiTimecode,lpAARI);

		/* if no real data was read and the end of the stream was suddenly
		   reached, that read was useless -> retry so the end-of-stream
		   handling below can switch to the next source */
		if (res == 0 && info.active_source->IsEndOfStream()) {
			return Read(lpDest, dwMicrosecDesired, lpdwMicrosecRead,
				lpqwNanosecRead, lpiTimecode, lpAARI);
		}

		if (lpiTimecode) {
			SetCurrentTimecode(*lpiTimecode * info.active_source->GetTimecodeScale(), TIMECODE_UNSCALED);
			*lpiTimecode = GetCurrentTimecode();
		}
		if (lpAARI) {
			if (lpAARI->iNextTimecode != TIMECODE_UNKNOWN) {
	//			lpAARI->iNextTimecode = lpAARI->iNextTimecode*info.active_source->GetTimecodeScale() / GetTimecodeScale() + GetBias();
			}
		}
		if (lpAARI) {
			// end reached -> current audio frame was last one of active source
			if (info.active_source->IsEndOfStream()) {
				// is there another audio source?
				if (info.iActiveSource < info.iCount-1) {
					AUDIOSOURCE* next = info.audiosources[info.iActiveSource+1];
					// join seamlessly? -> set BIAS for next source accordingly
					if (IsSeamless()) {
						next->SetBias(GetCurrentTimecode() * GetTimecodeScale()+
							*lpqwNanosecRead, BIAS_UNSCALED);
					}
					lpAARI->iNextTimecode = next->GetBias(BIAS_UNSCALED) / GetTimecodeScale();
				}
			} else {
				if (lpAARI->iNextTimecode != TIMECODE_UNKNOWN) {
					lpAARI->iNextTimecode *= info.active_source->GetTimecodeScale();
					lpAARI->iNextTimecode /= GetTimecodeScale();
					lpAARI->iNextTimecode += GetBias();
				}
			}
		}

		return res;
	} else {
		// end of list?
		if (info.iActiveSource >= info.iCount-1) {
			return AS_ERR;
		} else {
		// one more file available

			/* create log entry */
			char cTime[64]; memset(cTime, 0, sizeof(cTime));
			Millisec2Str(info.active_source->GetCurrentTimecode() *
				info.active_source->GetTimecodeScale() / 1000000, cTime);

			char cName[1024]; memset(cName, 0, sizeof(cName));
			GetName(cName);

			char cMsg[2048]; memset(cMsg, 0, sizeof(cMsg));
			sprintf(cMsg, "End of stream encountered\nName: %s\nTimecode: %s",
				cName, cTime);

		//	GetApplicationTraceFile()->Trace(TRACE_LEVEL_NOTE, "End of stream", cMsg);


			info.active_source = info.audiosources[++info.iActiveSource];
			if (IsSeamless()) {
				info.active_source->SetBias(info.audiosources[info.iActiveSource-1]->GetCurrentTimecode());
			}
			return Read(lpDest,dwMicrosecDesired,lpdwMicrosecRead,lpqwNanosecRead,lpiTimecode,lpAARI);
		}
	}
}
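The seamless-join handling above reduces to a simple idea: when playback moves to the next source, that source's bias is set to the point where the previous one ended, so the reported timecodes form one continuous timeline. A minimal sketch of that chaining with illustrative types (not the library's actual classes):

#include <cstdint>
#include <vector>

// Illustrative source: reports timecodes relative to its own start plus a bias.
struct SketchSource
{
	std::int64_t durationNs;	// total length of this source in nanoseconds
	std::int64_t biasNs;		// offset added to every timecode it reports
};

// Chain sources seamlessly: each source's bias is the accumulated duration of
// everything before it, so the joined timeline has no gaps or resets.
void ChainSeamlessly(std::vector<SketchSource> &sources)
{
	std::int64_t runningEndNs = 0;
	for (SketchSource &source : sources)
	{
		source.biasNs = runningEndNs;
		runningEndNs += source.durationNs;
	}
}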
Example #6
int SUBTITLESFROMMATROSKA::Read(void* lpDest, int* iSize, __int64* lpiTimecode,
							ADVANCEDREAD_INFO* lpAARI)
{
	READ_INFO	r;

	// if SRT: read text and return as result
	if (GetFormat() == SUBFORMAT_SRT) {
		info.m->SetActiveTrack(info.iStream);
		if (info.m->Read(&r)==READBL_OK) {
			if (iSize) *iSize = r.pData->GetSize();
			if (lpDest) memcpy(lpDest,r.pData->GetData(),r.pData->GetSize());
			if (lpiTimecode) {
				*lpiTimecode = (r.qwTimecode*info.m->GetTimecodeScale() + GetBias(BIAS_UNSCALED))/GetTimecodeScale();
			}
			if (lpAARI) {
				lpAARI->iDuration = r.qwDuration * info.m->GetTimecodeScale() / GetTimecodeScale();
			}
			DecBufferRefCount(&r.pData);
		} else {
			if (iSize) *iSize = 0;
		}
	}
	// if VobSub: read subtitle data and return as result
	if (GetFormat() == SUBFORMAT_VOBSUB) {
		info.m->SetActiveTrack(info.iStream);
		if (info.m->Read(&r)==READBL_OK) {
			if (iSize) *iSize = r.pData->GetSize();
			if (lpDest) memcpy(lpDest,r.pData->GetData(),r.pData->GetSize());
			if (lpiTimecode) {
				*lpiTimecode = (r.qwTimecode*info.m->GetTimecodeScale() + GetBias(BIAS_UNSCALED))/GetTimecodeScale();
			}
			if (lpAARI) {
				lpAARI->iDuration = r.qwDuration * info.m->GetTimecodeScale() / GetTimecodeScale();
			}
			DecBufferRefCount(&r.pData);
		} else {
			if (iSize) *iSize = 0;
		}
	}

	// if SSA: split style apart!
	if (GetFormat() == SUBFORMAT_SSA) {
		info.m->SetActiveTrack(info.iStream);
		if (info.m->Read(&r)==READBL_OK) {
			char* lpcDest = (char*)lpDest;

			// skip the first two comma-separated fields of the SSA event;
			// the third field is the style name, terminated at the next comma
			int j = 2; char* c = (char*)r.pData->GetData(); char* cStyle;
			while (*c++ != ',' || --j);
			cStyle = c;
			while (*c++ != ',');
			*(c-1)=0;	// null-terminate the style name

			// copy everything after the style field behind a 4-byte placeholder
			// that is later overwritten with the resolved style pointer
			int i = strlen(c);
			if (iSize) *iSize = i;
			if (lpDest) memcpy(lpcDest+4,c,i+1);
			memset(lpcDest,0,4);

			if (lpiTimecode) {
				*lpiTimecode = (r.qwTimecode*info.m->GetTimecodeScale() + GetBias(BIAS_UNSCALED))/GetTimecodeScale();
			}
//			if (lpiTimecode) *lpiTimecode = r.qwTimecode + GetBias();
			if (lpAARI) {
				lpAARI->iDuration = r.qwDuration * info.m->GetTimecodeScale() / GetTimecodeScale();
			}

			// look up the style by name; a leading '*' in the event's style field is skipped when matching
			SSA_STYLE* style = NULL;

			for (i=0;i<GetSSAStyleCount();i++) {
				char* stn = cStyle;
				if (!strcmp(GetSSAStyle(i)->sssStruct.lpcName,cStyle)) {
					style = GetSSAStyle(i);
				}
				if (stn[0]=='*' && !strcmp(stn+1,GetSSAStyle(i)->sssStruct.lpcName)) {
					style = GetSSAStyle(i);
				}
			}
			memcpy(lpDest,&style,sizeof(style));

			DecBufferRefCount(&r.pData);
		} else return SUBS_ERR;
	}

	return SUBS_OK;
}
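The SSA branch above walks raw pointers with no length checks, which is fine only if the block is well-formed. A bounded variant of the same comma-field splitting, with illustrative names, would look like this:

#include <cstddef>
#include <string>
#include <vector>

// Split an SSA event line into at most maxFields comma-separated fields; the
// last field keeps any remaining commas, since it is the free-form text.
std::vector<std::string> SplitSsaFields(const char* data, std::size_t size, std::size_t maxFields)
{
	std::vector<std::string> fields;
	std::size_t start = 0;
	for (std::size_t i = 0; i < size && fields.size() + 1 < maxFields; i++) {
		if (data[i] == ',') {
			fields.emplace_back(data + start, i - start);
			start = i + 1;
		}
	}
	fields.emplace_back(data + start, size - start);
	return fields;
}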