Example #1
	vector<int> Recoloring::MatchGaussians(CvEM& source_model, CvEM& target_model) {
		int num_g = source_model.get_nclusters();
		Mat sMu(source_model.get_means());
		Mat tMu(target_model.get_means());
		const CvMat** target_covs = target_model.get_covs();
		const CvMat** source_covs = source_model.get_covs();

		double best_dist = std::numeric_limits<double>::max();
		vector<int> best_res(num_g);
		vector<int> prmt(num_g); 

		for(int itr = 0; itr < 10; itr++) {
			for(int i=0;i<num_g;i++) prmt[i] = i;	//make a permutation
			randShuffle(Mat(prmt));

			//Greedy selection
			vector<int> res(num_g);
			vector<bool> taken(num_g);
			for(int sg = 0; sg < num_g; sg++) {
				double min_dist = std::numeric_limits<double>::max(); 
				int minv = -1;
				for(int tg = 0; tg < num_g; tg++) {
					if(taken[tg]) continue;

					//TODO: can save on re-calculation of pairs - calculate affinity matrix ahead
					//double d = norm(sMu(Range(prmt[sg],prmt[sg]+1),Range(0,3)),	tMu(Range(tg,tg+1),Range(0,3)));
					
					//symmetric kullback-leibler
					Mat diff = Mat(sMu(Range(prmt[sg],prmt[sg]+1),Range(0,3)) - tMu(Range(tg,tg+1),Range(0,3)));
					Mat d = diff * Mat(Mat(source_covs[prmt[sg]]).inv() + Mat(target_covs[tg]).inv()) * diff.t();
					//covariance term of the symmetric KL: tr(Ss*St^-1 + St*Ss^-1 - 2I)
					Scalar tr = trace(Mat(
						Mat(Mat(source_covs[prmt[sg]])*Mat(target_covs[tg]).inv()) + 
						Mat(Mat(target_covs[tg])*Mat(source_covs[prmt[sg]]).inv()) - 
						Mat(Mat::eye(3,3,CV_64FC1)*2)
						));
					double kl_dist = d.at<double>(0,0) + tr[0];
					if(kl_dist<min_dist) {
						min_dist = kl_dist;
						minv = tg;
					}
				}
				res[prmt[sg]] = minv;
				taken[minv] = true;
			}

			double dist = 0;
			for(int i=0;i<num_g;i++) {
				dist += norm(sMu(Range(prmt[i],prmt[i]+1),Range(0,3)),
							tMu(Range(res[prmt[i]],res[prmt[i]]+1),Range(0,3)));
			}
			if(dist < best_dist) {
				best_dist = dist;
				best_res = res;
			}
		}

		return best_res;
	}
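A minimal usage sketch for MatchGaussians: it assumes a Recoloring instance and two CvEM mixtures (source_model, target_model) trained elsewhere; those names and the printout are illustrative only, not part of the original example.

	// Hypothetical usage sketch -- source_model and target_model are assumed to be
	// CvEM models trained beforehand; MatchGaussians returns, for each source
	// Gaussian s, the index of the target Gaussian it was greedily paired with.
	Recoloring rec;
	vector<int> match = rec.MatchGaussians(source_model, target_model);
	for (int s = 0; s < (int)match.size(); s++)
		printf("source Gaussian %d -> target Gaussian %d\n", s, match[s]);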
Example #2
// ######################################################################
// train or test the network
void run(int isTest)
{
    LINFO("Run the samples");
    double errSum = double(nSamples);
    double err;
    Image<double> ffnOut;
    int nfc = nSamples;
    int fc;
    int nfcClass[info->nOutput][info->nOutput];//confusion matrix[target][output]
    int nTrials = 0;
    int target = 0;

    if(nSamples == 0) return;
    int order[nSamples];
    for(int i = 0; i < nSamples; i++) order[i] = i;

    while(nTrials < MAX_EPOCH && !isTest && nfc > int(nSamples*ERR_THRESHOLD))
    {
        // reinitialize statistic variables
        for(uint i = 0; i < info->nOutput; i++)
            for(uint j = 0; j < info->nOutput; j++)
                nfcClass[i][j] = 0;
        errSum = 0.0;
        nfc = 0;

        // run the input in random order
        randShuffle(order, nSamples);

        for(int i = 0; i < nSamples; i++)
        {
            // run the input
            ffn->run3L(in[order[i]]);
            ffnOut = ffn->getOutput();

            // get the error
            diff(out[order[i]], ffnOut, err, fc, target);

            // add misclassification count
            if(fc != -1)
            {
                nfc++;
                nfcClass[target][fc]++;
            }
            else
                nfcClass[target][target]++;

            // and the numerical deviation
            errSum += err;

            if(fc != -1)
            {
                //ffn->setLearnRate(learnRate*10);
                ffn->backprop3L(out[order[i]]);
                //ffn->setLearnRate(learnRate);
            }
        }
        nTrials++;

        // report progress (the modulus of 1 means every epoch; raise it to report less often)
        if(nTrials % 1 == 0)
        {
            printf("Trial_%04d_Err: %f nfc: %5d/%5d -> %f%%\n",
                   nTrials, errSum/nSamples,
                   nfc,nSamples,(double)(nfc)/(0.0 + nSamples)*100.0);

            printf("class |");
            for(uint k = 0;  k < info->nOutput; k++)
                printf(" %4d", k);
            printf("\n");
            for(uint k = 0;  k < info->nOutput; k++)
                printf("------");
            printf("\n");
            for(uint k = 0; k < info->nOutput; k++)
            {
                printf("%6d|",k);
                for(uint j = 0; j < info->nOutput; j++)
                    printf(" %4d",nfcClass[k][j]);
                printf("\n");
            }
        }
        printf("\n");
    }

    // print the results if testing
    if(isTest)
    {
        nfc = 0;
        errSum = 0.0;
        err = 0;
        for(uint i = 0; i < info->nOutput; i++)
            for(uint j = 0; j < info->nOutput; j++)
                nfcClass[i][j] = 0;

        for(int i = 0; i < nSamples; i++)
        {
            // run the input
            ffn->run3L(in[i]);

            // get the output
            ffnOut = ffn->getOutput();

            // get the error
            diff(out[i], ffnOut, err, fc, target);

            // add misclassification count
            if(fc != -1)
            {
                nfc++;
                nfcClass[target][fc]++;
            }
            else
                nfcClass[target][target]++;

            // and the numerical deviation
            errSum += err;

            if((fc != -1) | 1) // the "| 1" makes this true for every sample, so all outputs get printed
            {
                printf("sample %5d: ",i);
                for(uint j = 0; j < info->nOutput; j++)
                    printf("%.3f ",out[i][j]);
                printf(" -:- ");
                for(uint j = 0; j < info->nOutput; j++)
                    printf("%.3f ",ffnOut[j]);
            }
            if(fc != -1) printf(" WRONG! NO:%d  [%d][%d] = %d \n",
                                    nfc, target, fc, nfcClass[target][fc]);
            else printf("\n");
        }
    }

    // final error count
    printf("Final Trial_%04d_Err: %f nfc: %5d/%5d -> %.3f%%\n",
           nTrials,errSum/nSamples,
           nfc,nSamples,(double)(nfc)/(0.0 + nSamples)*100.0);

    printf("class |");
    for(uint k = 0;  k < info->nOutput; k++)
        printf(" %5d",k);
    printf("     Total          pct. err \n-------");
    for(uint k = 0;  k < info->nOutput; k++)
        printf("------");
    printf("\n");
    for(uint k = 0; k < info->nOutput; k++)
    {
        int t = 0, e = 0;
        printf("%6d|",k);
        for(uint j = 0; j < info->nOutput; j++)
        {
            printf(" %5d",nfcClass[k][j]);
            if(k == j)
                t = nfcClass[k][j];
            else
                e += nfcClass[k][j];
        }
        if(e+t == 0)
            printf(" %6d/%6d     N/A%%\n",0,0);
        else
            printf(" %6d/%6d  %6.2f%%\n",e,e+t, float(e)/float(e+t)*100.0);
    }

    for(uint k = 0;  k < info->nOutput; k++)
        printf("------");
    printf("-------\nFalse+|");
    for(uint k = 0; k < info->nOutput; k++)
    {
        int e = 0;
        for(uint j = 0; j < info->nOutput; j++)
        {
            if(k == j)
                ; //t = nfcClass[j][k];
            else
                e += nfcClass[j][k];
        }
        printf(" %5d",e);
    }
    printf("\ntotal |");
    for(uint k = 0; k < info->nOutput; k++)
    {
        int t = 0, e = 0;
        for(uint j = 0; j < info->nOutput; j++)
        {
            if(k == j)
                t = nfcClass[j][k];
            else
                e += nfcClass[j][k];
        }
        printf(" %5d",e+t);
    }
    printf("\nerr:  |");
    for(uint k = 0; k < info->nOutput; k++)
    {
        int t = 0, e = 0;
        for(uint j = 0; j < info->nOutput; j++)
        {
            if(k == j)
                t = nfcClass[j][k];
            else
                e += nfcClass[j][k];
        }
        if(e+t == 0)
            printf("  N/A");
        else
            printf(" %5.2f",float(e)/float(e+t)*100.0);
    }
    printf("\n");
}
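A sketch of how run() is presumably driven; the globals it reads (ffn, in, out, nSamples, info) are assumed to have been initialized by the surrounding program, and the call order below is an illustration rather than code from the original source.

    // Hypothetical call sequence, assuming the samples and the network were set up beforehand.
    run(0);   // train: shuffles the sample order each epoch, backpropagates on misclassified samples
    run(1);   // test: one forward pass per sample, prints per-sample output and the confusion matrix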
Example #3
void population::fixer(specimen_t& indi)
{
	uint64_t iter, jter;

	// Randomize list of indices to decrease bias
	Mat lister(1, NUMBER_DIMENSIONS, CV_8UC1);
	for (iter=0; iter<NUMBER_DIMENSIONS; iter++)
	{
		lister.at<uint8_t>(iter) = iter;
	}
	randShuffle(lister, 1, &rudi_);

	GENO_TYPE checker;
	// Fix for adjacency
	for (iter=0; iter<NUMBER_DIMENSIONS; iter++)
	{
		if (indi.gen.one[lister.at<uint8_t>(iter)]==1)
		{
			if (fixPos_)
			{
				// Fix particle
				checker = West73_Adjacency[lister.at<uint8_t>(iter)] & indi.gen.one;
				if (checker.any())
				{
					for (jter=0; jter<NUMBER_DIMENSIONS; jter++)
					{
						// re-randomize the position of every dimension flagged as adjacent
						if (checker[jter]==1)
							indi.cc.pos[jter] = rudi_.uniform(0.0,1.0);
					}
				}
			}
			// Fix genotype
			indi.gen.one &= West73_NotAdjacency[lister.at<uint8_t>(iter)];
		}
		else if (indi.gen.two[lister.at<uint8_t>(iter)]==1)
		{
			if (fixPos_)
			{
				// Fix particle
				checker = West73_Adjacency[lister.at<uint8_t>(iter)] & indi.gen.two;
				if (checker.any())
				{
					for (jter=0; jter<NUMBER_DIMENSIONS; jter++)
					{
						if (checker[jter]==1)
							indi.cc.pos[jter] = rudi_.uniform(0.0,1.0);
					}
				}
			}

			// Fix genotype
			indi.gen.two &= West73_NotAdjacency[lister.at<uint8_t>(iter)];
		}
		else if (indi.gen.thr[lister.at<uint8_t>(iter)]==1)
		{
			if (fixPos_)
			{
				// Fix particle
				checker = West73_Adjacency[lister.at<uint8_t>(iter)] & indi.gen.thr;
				if (checker.any())
				{
					for (jter=0; jter<NUMBER_DIMENSIONS; jter++)
					{
						if (checker[jter]==1)
							indi.cc.pos[jter] = rudi_.uniform(0.0,1.0);
					}
				}
			}

			// Fix genotype
			indi.gen.thr &= West73_NotAdjacency[lister.at<uint8_t>(iter)];
		}
	}

/*
	// Fix for repeated harvesting
	uint64_t temp = rudi_.uniform(1,4);
	if (temp==1)
	{
		indi.gen.two &= ~indi.gen.one;
		indi.gen.thr &= ~indi.gen.one;

		temp = rudi_.uniform(0,2);
		if (temp==0)
			indi.gen.thr &= ~indi.gen.two;
		else
			indi.gen.two &= ~indi.gen.thr;
	}
	else if (temp==2)
	{
		indi.gen.one &= ~indi.gen.two;
		indi.gen.thr &= ~indi.gen.two;

		temp = rudi_.uniform(0,2);
		if (temp==0)
			indi.gen.thr &= ~indi.gen.one;
		else
			indi.gen.one &= ~indi.gen.thr;
	}
	else if (temp==3)
	{
		indi.gen.one &= ~indi.gen.thr;
		indi.gen.two &= ~indi.gen.thr;

		temp = rudi_.uniform(0,2);
		if (temp==0)
			indi.gen.two &= ~indi.gen.one;
		else
			indi.gen.one &= ~indi.gen.two;
	}
*/
}
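All three examples lean on the same idiom: fill an index buffer and shuffle it in place with cv::randShuffle so later work visits the items in random order. The self-contained sketch below isolates just that pattern; the buffer size and the RNG seed are arbitrary choices for illustration.

#include <opencv2/core/core.hpp>
#include <cstdio>

int main()
{
	// Build an identity index buffer 0..n-1.
	const int n = 10;
	cv::Mat idx(1, n, CV_32SC1);
	for (int i = 0; i < n; ++i)
		idx.at<int>(i) = i;

	// Shuffle in place; seeding the RNG makes the order reproducible.
	cv::RNG rng(12345);
	cv::randShuffle(idx, 1.0, &rng);

	for (int i = 0; i < n; ++i)
		std::printf("%d ", idx.at<int>(i));
	std::printf("\n");
	return 0;
}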