void CLWSamplingInfEngine::
EnterEvidenceProbability( floatVecVector *pEvidenceProbIn )
{
	// Sets each particle's weight to the product of the externally
	// supplied evidence probabilities for that particle, then
	// renormalizes the weight vector.
	PNL_CHECK_IS_NULL_POINTER(pEvidenceProbIn);

	int i, j;
	float w;
	int iSamples = pEvidenceProbIn->size();
	int iSampleSize = m_currentEvVec.size();

	// The caller must supply exactly one probability vector per particle.
	if(iSamples != iSampleSize) return;

	for( i = 0; i < iSamples; i++)
	{
		w = 1;

		// BUG FIX: the loop bound was pEvidenceProbIn[i].size() —
		// pointer arithmetic on the vector pointer itself, which reads
		// past the pointed-to object for every i > 0. Dereference the
		// pointer first, as the loop body already did.
		for( j = 0; j < (int)(*pEvidenceProbIn)[i].size(); j++)
		{
		 	w = w * (*pEvidenceProbIn)[i][j];
		}

		m_particleWeight[i] = w;
	}

	NormalizeWeight();
}
void CLWSamplingInfEngine::
EnterEvidence( const CEvidence *pEvidenceIn , int maximize , int sumOnMixtureNode  )
{
    PNL_CHECK_IS_NULL_POINTER(pEvidenceIn);

    // Draw a fresh particle set conditioned on the evidence.
    LWSampling(pEvidenceIn);

	// Weight each particle by the likelihood of the observed nodes
	// under their CPDs, accumulated in log space and exponentiated
	// once at the end.
	const int* obsNodes     = pEvidenceIn->GetAllObsNodes();
	const int  numObsNodes  = pEvidenceIn->GetNumberObsNodes();
	const int  numParticles = m_currentEvVec.size();

	for( int s = 0; s < numParticles; s++)
	{
		float logW = 0;

		for( int n = 0; n < numObsNodes; n++)
		{
			if( !pEvidenceIn->IsNodeObserved(obsNodes[n]) )
				continue;

			CFactor* pFactor = m_pGraphicalModel->GetFactor(obsNodes[n]);
			logW += pFactor->GetLogLik( m_currentEvVec[s] );
		}

		m_particleWeight[s] = (float)exp(logW);
	}

	NormalizeWeight();
}
float CLWSamplingInfEngine::GetNeff()
{
	// Effective sample size of the particle set: 1 / sum(w_i^2) over
	// the normalized weights. Returns -1 when the sum is degenerate.
	if( !m_bNormalized ) NormalizeWeight();

	float sumSquares = 0;
	for( int i = 0; i < m_particleCount; ++i )
	{
		const float wi = m_particleWeight[i];
		sumSquares += wi * wi;
	}

	return ( sumSquares > 0 ) ? 1 / sumSquares : -1;
}
// Exemplo n.º 4 (extraction artifact — commented out so the file parses)
// 0
/*
 * This is the most important function in localization. go through the poles in pole list
 * and calculate weight and make decisions...
 */
void Algorithm::ParticleFilter(list<PoleData> & poleList) //i have array of single poles
{
	/*
	 * if there is only one or two poles we can't trust that. so wait for more than 1.
	 * but if the oldest pole is very old, make an iteration.
	 */
	if (/*poleList.front().time-(*m_currTimePtr) > 50000 && */poleList.size()<=2) return;

	//reset counter — a pole picture was processed this step
	m_stepsWithoutPolePicture=0;

#ifdef LOCALIZATION_DEBUG
	printf("Algorithm::ParticleFilter(list<PoleData> & poleList):\n");
	printf("have %d poles to go through\n",poleList.size());
#endif

	//this was a try to make a low pass filter to throw poles with noise. not tested.
	//TODO try to do this. not so important.

	double w;
	// Consume the pole queue, folding each observation into every
	// particle's weight.
	while (poleList.size()>0)
	{
		PoleData pole=poleList.front();
		poleList.pop_front();

#ifdef LOCALIZATION_DEBUG
		cout<<"\t#poles: "<<poleList.size()<<" \t";
		cout<<"Dist: "<<pole.distFromMe<<"\tID: "<<pole.id<<endl;
#endif

		//go through each particle and calculate the weight
		for (State_iterator it = m_currState.begin(); it != m_currState.end() ;it++)//update weight
		{
			/*
			 * TODO: what happens when pole.id==GOAL_UNKNOWN_BACK?
			 * It falls through to the "known pole" branch below — verify
			 * getWeightFromPole() handles that id, or mirror the
			 * left/right disambiguation done for GOAL_UNKNOWN_FRONT.
			 */
			if (pole.id==GOAL_UNKNOWN_FRONT) // either left or right front pole, score both and keep the best.
			{
				double w0,w1;
				pole.id=GOAL_LEFT_FRONT;
				w0=getWeightFromPole(pole,*it);
				pole.id=GOAL_RIGHT_FRONT;
				w1=getWeightFromPole(pole,*it);
				pole.id=GOAL_UNKNOWN_FRONT;
				if (w0>w1) w=w0; else w=w1;
			}
			else //We know what pole it is.
			{
				w=getWeightFromPole(pole,*it);
			}

			//give new weight for the particle, while considering old weight.
			(it->weight)*=w;
		}
	}

	double sumOfWeight=0;
	for (State_iterator it = m_currState.begin(); it != m_currState.end() ;it++) {
		sumOfWeight+=it->weight;
	}

	// BUG FIX: the out-of-location test must inspect the RAW weight sum.
	// The old code inverted first (sumOfWeight = 1/sumOfWeight) and then
	// checked < 0.001, so a vanishing sum (all particles lost) produced a
	// huge value and the test never fired. The PoiData overload below
	// also tests the raw sum.
	if (sumOfWeight<0.001)
		m_outOfLocation=true;
	else
		m_outOfLocation=false;

	// NOTE(review): this overload passes the reciprocal to
	// NormalizeWeight() while the PoiData overload passes the raw sum —
	// confirm which one NormalizeWeight() expects. Kept as originally
	// written here.
	sumOfWeight = 1 / sumOfWeight;

	//normalize weight for the monte carlo
	NormalizeWeight(sumOfWeight); //sum of weights = 1

	//choose new set (resampling step)
	UpdateStates();
}
// Exemplo n.º 5 (extraction artifact — commented out so the file parses)
// 0
//This function will eventually replace the above function to handle
// POIs instead of poles
//TODO compare this function to the previous and see if we can upgrade some of the things
// they were unable to do last year
void Algorithm::ParticleFilter(list<PoiData>& poiList)
{
	// Wait until at least three POIs have accumulated; one or two
	// sightings are not trustworthy enough for a filter update.
	// TODO verify whether an age-based override (very old POIs forcing
	// an iteration anyway) is still wanted here, as in the pole version.
	if (poiList.size() <= 2)
		return;

	// A landmark picture was processed this step — reset the counter.
	m_stepsWithoutPolePicture = 0;

#ifdef LOCALIZATION_DEBUG
	cout << "[Algorithm::ParticleFilter]: have " << poiList.size() << " POIs to go through" << endl;
#endif

	//TODO attempt to create a LPF for the POIs

	// Drain the POI queue, folding every observation into each
	// particle's running weight.
	while (!poiList.empty())
	{
		PoiData poi = poiList.front();
		poiList.pop_front();

#ifdef LOCALIZATION_DEBUG
		cout<<"\t#POIs: "<<poiList.size()<<" \t";
		cout<<"Dist: "<<poi.distFromMe<<"\tID: "<<poi.id<<endl;
#endif

		for (State_iterator it = m_currState.begin(); it != m_currState.end(); it++)
		{
			double w;

			if (poi.id == UNKNOWN)
			{
				// Unidentified POI: score it against every known id and
				// keep the best match.
				double best = 0;
				for (int id = 0; id < NUMBER_OF_POIS; id++)
				{
					poi.id = (PoiId)id;
					double candidate = GetWeightFromPoi(poi, *it);
					if (candidate > best)
						best = candidate;
				}
				poi.id = UNKNOWN;
				w = best;
			}
			else //We know what poi it is.
			{
				w = GetWeightFromPoi(poi, *it);
			}

			// New weight takes the old weight into account.
			it->weight *= w;
		}
	}

	double sumOfWeight = 0;
	for (State_iterator it = m_currState.begin(); it != m_currState.end(); it++)
		sumOfWeight += it->weight;

	// A vanishing mean weight means no particle explains the
	// observations — we are lost.
	m_outOfLocation = ((sumOfWeight / m_currState.size()) < 0.001);

	NormalizeWeight(sumOfWeight);

	// Resample: choose the new particle set.
	UpdateStates();
}
void CLWSamplingInfEngine::
MarginalNodes( const int *queryIn, int querySz, int notExpandJPD)
{
	// Builds the joint distribution over the query nodes from the
	// weighted particle set: a tabular potential when all query nodes
	// are discrete, a Gaussian potential (weighted mean/covariance)
	// when all are continuous. Finally extracts the MPE from the JPD.
	PNL_CHECK_IS_NULL_POINTER(queryIn);

	if( m_bNormalized == false ) NormalizeWeight();

	int i, j, k;
	int offset;
	int nsamples = m_particleCount;
    	
	const CBNet *pBNet = static_cast<const CBNet *>( m_pGraphicalModel );
	int type = 0;                       // count of discrete query nodes
	int totalnodesizes = 0;
	intVector nodesize(querySz);
	intVector mulnodesize(querySz, 1);  // row-major stride of each query node
	
	for( i = querySz; --i >=0; )
	{
		const CNodeType* pNodeType = pBNet->GetNodeType(queryIn[i]);
		nodesize[i] = pNodeType->GetNodeSize();			
		
		if(i == querySz-1)	
			mulnodesize[i] = 1;
		else
			// BUG FIX: the stride of query node i is the product of the
			// sizes of ALL later query nodes, not just the next one.
			// The old code computed mulnodesize[i] = nodesize[i+1],
			// which mis-addresses the joint table for 3+ query nodes.
			mulnodesize[i] = mulnodesize[i+1] * nodesize[i+1];

		if(pNodeType->IsDiscrete())
		{
			type++;
			
			// Discrete nodes multiply into the table size...
			if(totalnodesizes == 0)
				totalnodesizes = nodesize[i];
			else
				totalnodesizes = totalnodesizes * nodesize[i]; 
		}
		else
		{
			// ...continuous nodes add their dimensionality.
			totalnodesizes = totalnodesizes + nodesize[i]; 
		}
	}
	
	if(type == querySz)
	{
		//all query nodes are discrete: histogram the particle weights
		float *tab = new float[totalnodesizes];
		for(i = 0; i < totalnodesizes; i++) tab[i] = 0;

		for( i = 0; i < nsamples; i++)
		{
			CEvidence* pEvidence = m_currentEvVec[i];
			
			// Flatten the particle's query values into a table index.
			int index = 0;
			for(j = 0; j < querySz; j++)
			{
				Value* pValue = pEvidence->GetValue(queryIn[j]);
				index += pValue->GetInt() * mulnodesize[j];
			}					
			
			tab[index] += m_particleWeight[i];
		}

		m_pQueryJPD = CTabularPotential::Create(queryIn, querySz, pBNet->GetModelDomain ()  );
		m_pQueryJPD->AllocMatrix( tab, matTable );

		delete []tab;
	}
	else if(type == 0)
	{
		//all query nodes are gaussian: weighted sample mean + covariance
		float* val  = new float[totalnodesizes];
		float* mean = new float[totalnodesizes];
		float* cov  = new float[totalnodesizes * totalnodesizes];
		for( i = 0; i < totalnodesizes; i++) mean[i] = 0;
		for( i = 0; i < totalnodesizes * totalnodesizes; i++) cov[i] = 0;

		// mean: sum of weight * value over all particles
		for( i = 0; i < nsamples; i++)
		{
			CEvidence* pEvidence = m_currentEvVec[i];
			
			for(j = 0, 	offset = 0; j < querySz; j++)
			{
				Value* pValue = pEvidence->GetValue(queryIn[j]);
				for(k = 0; k < nodesize[j]; k++)
				{
					mean[offset] += m_particleWeight[i] * pValue[k].GetFlt();
					offset++;
				}
			}
		}
		
		// covariance: weighted outer product of the centered values
		for( i = 0; i < nsamples; i++)
		{
			CEvidence* pEvidence = m_currentEvVec[i];
			
			// Gather this particle's query values into one flat vector.
			for(j = 0, 	offset = 0; j < querySz; j++)
			{
				Value* pValue = pEvidence->GetValue(queryIn[j]);
				for(k = 0; k < nodesize[j]; k++)
				{
					val[offset] = pValue[k].GetFlt();
					offset++;
				}
			}
			
			// Accumulate the upper triangle and mirror it.
			for(j = 0; j < totalnodesizes; j++)
			{
				for(k = j; k < totalnodesizes; k++)
				{
					cov[k*totalnodesizes+j] += ( m_particleWeight[i] * ( val[j]- mean[j]) * (val[k] - mean[k]) );
					cov[j*totalnodesizes+k] = cov[k*totalnodesizes+j];
				}
			}
		}

		m_pQueryJPD = CGaussianPotential::Create( queryIn, querySz, pBNet->GetModelDomain () );
		m_pQueryJPD->AllocMatrix( mean, matMean );
		m_pQueryJPD->AllocMatrix( cov, matCovariance);

		delete []val;
		delete []mean;
		delete []cov;
	}
	// NOTE(review): a mixed discrete/continuous query takes neither
	// branch, leaving m_pQueryJPD unchanged (possibly stale) before the
	// GetMPE() call below — confirm callers never pass mixed queries.

	//Get MPE
	delete m_pEvidenceMPE;
	m_pEvidenceMPE = NULL;

    m_pEvidenceMPE = m_pQueryJPD->GetMPE();
}
void CLWSamplingInfEngine::Estimate(CEvidence * pEstimate)
{
	// Writes a point estimate into pEstimate for each of its observed
	// nodes: the weight-argmax state for discrete nodes, the weighted
	// mean for continuous nodes. No-op when pEstimate is NULL.
	if( pEstimate == NULL ) return;
	if( m_bNormalized == false ) NormalizeWeight();

	int i, j, k, offset;

	int  iNodes   = pEstimate->GetNumberObsNodes ();
	const int* obsNodes = pEstimate->GetAllObsNodes();
	const CNodeType *const* pNodeTypes = pEstimate->GetNodeTypes();
	
	// Flat accumulation buffer: one slot per discrete state / per
	// continuous dimension, nodes laid out back to back.
	int totalNodeSize =0;
	intVector nodeSize(iNodes);
	for( j = 0, totalNodeSize = 0; j < iNodes; j++)
	{
		nodeSize[j] = pNodeTypes[j]->GetNodeSize();
		totalNodeSize += nodeSize[j];
	}
	
	floatVector pFloatValue;
	pFloatValue.resize(totalNodeSize, 0);
		
	// Accumulate the particle weights into the buffer.
	for( j = 0, offset = 0; j < iNodes; j++)
	{
		if( pNodeTypes[j]->IsDiscrete())  
		{
			for( i = 0; i < m_particleCount; i++)
			{
				Value* pValue = (m_currentEvVec[i])->GetValue(obsNodes[j]);

				// for discrete value type: weight mass per state
				pFloatValue[offset + pValue->GetInt()] = pFloatValue[offset + pValue->GetInt()] + m_particleWeight[i];
			}
		}
		else
		{
			for( i = 0; i < m_particleCount; i++)
			{
				Value* pValue = (m_currentEvVec[i])->GetValue(obsNodes[j]);

				// for continuous value type: weighted sum per dimension
				for( k =0; k < nodeSize[j]; k++)
				{
					pFloatValue[offset + k] = pFloatValue[offset + k] + m_particleWeight[i] * (pValue+k)->GetFlt();
				}
			}
		}
		offset += nodeSize[j];
	}

	// set pFloatValue data to pEstimate
	for( j = 0, offset = 0; j < iNodes; j++)
	{
		Value* pValue = pEstimate->GetValue(obsNodes[j]);
		
		if( pNodeTypes[j]->IsDiscrete())  
		{
			// Pick the state with the largest accumulated weight.
			float maxweight = -99; int itmax = 0;
			for( k =0; k < nodeSize[j]; k++)
			{
				if(maxweight < pFloatValue[offset + k])
				{
					maxweight = pFloatValue[offset + k];
					itmax = k;
				}
			}
			// FIX: this assignment used to sit inside the loop above,
			// redundantly rewriting the value on every iteration; the
			// final result is the same, written once.
			pValue->SetInt(itmax);
		}
		else
		{
			for( k =0; k < nodeSize[j]; k++)
				(pValue+k)->SetFlt( pFloatValue[offset + k] );
		}

		offset += nodeSize[j];
	}
}