Example #1
0
int main(int argc, char** argv) {

	// We will assume 2-state variables, where, to conform to the "small" example
	// we have 0 == "right answer" and 1 == "wrong answer"
	size_t nrStates = 2;

	// define variables
	DiscreteKey Cathy(1, nrStates), Heather(2, nrStates), Mark(3, nrStates),
			Allison(4, nrStates);

	// create graph
	DiscreteFactorGraph graph;

	// add node potentials
	graph.add(Cathy,   "1 3");
	graph.add(Heather, "9 1");
	graph.add(Mark,    "1 3");
	graph.add(Allison, "9 1");

	// add edge potentials
	graph.add(Cathy & Heather, "2 1 1 2");
	graph.add(Heather & Mark,  "2 1 1 2");
	graph.add(Mark & Allison,  "2 1 1 2");

	// Print the UGM distribution
	cout << "\nUGM distribution:" << endl;
	vector<DiscreteFactor::Values> allPosbValues = cartesianProduct(
			Cathy & Heather & Mark & Allison);

	// Compute the partition function Z = sum of the unnormalized potential
	// over all assignments. Previously the magic constant 3790 was hard-coded
	// here, which silently goes stale whenever a potential above is edited.
	double partitionZ = 0.0;
	for (size_t i = 0; i < allPosbValues.size(); ++i)
		partitionZ += graph(allPosbValues[i]);

	// Print each assignment with its potential and normalized probability
	for (size_t i = 0; i < allPosbValues.size(); ++i) {
		DiscreteFactor::Values values = allPosbValues[i];
		double prodPot = graph(values);
		cout << values[Cathy.first] << " " << values[Heather.first] << " "
				<< values[Mark.first] << " " << values[Allison.first] << " :\t"
				<< prodPot << "\t" << prodPot / partitionZ << endl;
	}

	// "Decoding", i.e., configuration with largest value (MPE)
	// We use sequential variable elimination
	DiscreteSequentialSolver solver(graph);
	DiscreteFactor::sharedValues optimalDecoding = solver.optimize();
	optimalDecoding->print("\noptimalDecoding");

	// "Inference" Computing marginals
	cout << "\nComputing Node Marginals .." << endl;
	Vector margProbs;

	margProbs = solver.marginalProbabilities(Cathy);
	print(margProbs, "Cathy's Node Marginal:");

	margProbs = solver.marginalProbabilities(Heather);
	print(margProbs, "Heather's Node Marginal");

	margProbs = solver.marginalProbabilities(Mark);
	print(margProbs, "Mark's Node Marginal");

	margProbs = solver.marginalProbabilities(Allison);
	print(margProbs, "Allison's Node Marginal");

	return 0;
}
Example #2
0
// Second truss example with non-trivial factors
TEST_UNSAFE( DiscreteMarginals, truss2 ) {

  const int nrNodes = 5;
  const size_t nrStates = 2;

  // Build the binary variables
  vector<DiscreteKey> nodes;
  for (int k = 0; k < nrNodes; k++)
    nodes.push_back(DiscreteKey(k, nrStates));

  // Assemble the graph from three ternary truss potentials
  DiscreteFactorGraph graph;
  graph.add(nodes[0] & nodes[2] & nodes[4],"1 2 3 4 5 6 7 8");
  graph.add(nodes[1] & nodes[3] & nodes[4],"1 2 3 4 5 6 7 8");
  graph.add(nodes[2] & nodes[3] & nodes[4],"1 2 3 4 5 6 7 8");

  // Brute force: accumulate the unnormalized mass of each variable
  // being true (T) or false (F) over every joint assignment
  vector<DiscreteFactor::Values> allPosbValues = cartesianProduct(
      nodes[0] & nodes[1] & nodes[2] & nodes[3] & nodes[4]);
  Vector T = zero(5), F = zero(5);
  for (size_t idx = 0; idx < allPosbValues.size(); ++idx) {
    DiscreteFactor::Values assignment = allPosbValues[idx];
    double mass = graph(assignment);
    for (size_t v = 0; v < 5; v++) {
      if (assignment[v])
        T[v] += mass;
      else
        F[v] += mass;
    }
  }

  // Check every marginal against a sequential solver and DiscreteMarginals
  DiscreteSequentialSolver solver(graph);
  DiscreteMarginals marginals(graph);
  for (size_t v = 0; v < 5; v++) {
    // normalize the brute-force sums into probabilities
    const double total = T[v] + F[v];
    T[v] /= total;
    F[v] /= total;

    // sequential solver
    Vector actualV = solver.marginalProbabilities(nodes[v]);
    EXPECT(assert_equal(Vector_(2, F[v], T[v]), actualV));

    // DiscreteMarginals
    vector<double> table;
    table += F[v], T[v];
    DecisionTreeFactor expectedM(nodes[v], table);
    DiscreteFactor::shared_ptr actualM = marginals(v);
    EXPECT(assert_equal(expectedM,
        *boost::dynamic_pointer_cast<DecisionTreeFactor>(actualM)));
  }
}
Example #3
0
/* ************************************************************************* */
TEST_UNSAFE( DiscreteMarginals, UGM_small ) {
  const size_t nrStates = 2;
  DiscreteKey Cathy(1, nrStates), Heather(2, nrStates), Mark(3, nrStates),
      Allison(4, nrStates);

  // Assemble the UGM as a discrete factor graph
  DiscreteFactorGraph graph;

  // unary (node) potentials
  graph.add(Cathy, "1 3");
  graph.add(Heather, "9 1");
  graph.add(Mark, "1 3");
  graph.add(Allison, "9 1");

  // pairwise (edge) potentials
  graph.add(Cathy & Heather, "2 1 1 2");
  graph.add(Heather & Mark, "2 1 1 2");
  graph.add(Mark & Allison, "2 1 1 2");

  DiscreteMarginals marginals(graph);

  // Marginal of Cathy as a factor: evaluate it at Cathy = 0
  DiscreteFactor::shared_ptr cathyMarginal = marginals(Cathy.first);
  DiscreteFactor::Values assignment;
  assignment[Cathy.first] = 0;
  EXPECT_DOUBLES_EQUAL( 0.359631, (*cathyMarginal)(assignment), 1e-6);

  // Marginals returned as probability vectors
  Vector probs = marginals.marginalProbabilities(Cathy);
  EXPECT(assert_equal(Vector_(2, 0.359631, 0.640369), probs, 1e-6));

  probs = marginals.marginalProbabilities(Mark);
  EXPECT(assert_equal(Vector_(2, 0.48628, 0.51372), probs, 1e-6));
}
Example #4
0
/* ************************************************************************* */
// First truss example: uniform-ish potentials, checked against hand-computed
// marginals via a multifrontal (junction-tree) elimination.
TEST_UNSAFE( DiscreteMarginals, truss ) {

  const int nrNodes = 5;
  const size_t nrStates = 2;

  // define variables (binary, keyed 0..4)
  vector<DiscreteKey> nodes;
  for (int i = 0; i < nrNodes; i++) {
    DiscreteKey dk(i, nrStates);
    nodes.push_back(dk);
  }

  // create graph and add three truss potentials
  DiscreteFactorGraph graph;
  graph.add(nodes[0] & nodes[2] & nodes[4],"2 2 2 2 1 1 1 1");
  graph.add(nodes[1] & nodes[3] & nodes[4],"1 1 1 1 2 2 2 2");
  graph.add(nodes[2] & nodes[3] & nodes[4],"1 1 1 1 1 1 1 1");
  // Eliminate multifrontally into a Bayes tree of DiscreteConditionals
  typedef JunctionTree<DiscreteFactorGraph> JT;
  GenericMultifrontalSolver<DiscreteFactor, JT> solver(graph);
  BayesTree<DiscreteConditional>::shared_ptr bayesTree = solver.eliminate(&EliminateDiscrete);
//  bayesTree->print("Bayes Tree");
  typedef BayesTreeClique<DiscreteConditional> Clique;

  // Expected clique for node 0: built with the signature DSL, where
  // (frontal | parent1, parent2) = "ratios" specifies a DiscreteConditional
  Clique expected0(boost::make_shared<DiscreteConditional>((nodes[0] | nodes[2], nodes[4]) = "2/1 2/1 2/1 2/1"));
  Clique::shared_ptr actual0 = (*bayesTree)[0];
//  EXPECT(assert_equal(expected0, *actual0)); // TODO, correct but fails

  // Expected clique for node 1 (comparison disabled, see TODO below)
  Clique expected1(boost::make_shared<DiscreteConditional>((nodes[1] | nodes[3], nodes[4]) = "1/2 1/2 1/2 1/2"));
  Clique::shared_ptr actual1 = (*bayesTree)[1];
//  EXPECT(assert_equal(expected1, *actual1)); // TODO, correct but fails

  // Create Marginals instance
  DiscreteMarginals marginals(graph);

  // test 0: marginal of node 0 should be [2/3, 1/3]
  DecisionTreeFactor expectedM0(nodes[0],"0.666667 0.333333");
  DiscreteFactor::shared_ptr actualM0 = marginals(0);
  EXPECT(assert_equal(expectedM0, *boost::dynamic_pointer_cast<DecisionTreeFactor>(actualM0),1e-5));

  // test 1: marginal of node 1 should be [1/3, 2/3]
  DecisionTreeFactor expectedM1(nodes[1],"0.333333 0.666667");
  DiscreteFactor::shared_ptr actualM1 = marginals(1);
  EXPECT(assert_equal(expectedM1, *boost::dynamic_pointer_cast<DecisionTreeFactor>(actualM1),1e-5));
}
Example #5
0
/* ************************************************************************* */
TEST_UNSAFE( DiscreteMarginals, UGM_chain ) {

	const int nrNodes = 10;
	const size_t nrStates = 7;

	// Build the 7-state chain variables
	vector<DiscreteKey> keys;
	for (int k = 0; k < nrNodes; k++)
		keys.push_back(DiscreteKey(k, nrStates));

	// Assemble the factor graph
	DiscreteFactorGraph graph;

	// Node potentials: only the first node is informative
	graph.add(keys[0], ".3 .6 .1 0 0 0 0");
	for (int k = 1; k < nrNodes; k++)
		graph.add(keys[k], "1 1 1 1 1 1 1");

	// Shared 7x7 transition potential between neighbours
	const std::string edgePotential =
			".08 .9 .01 0 0 0 .01 "
			".03 .95 .01 0 0 0 .01 "
			".06 .06 .75 .05 .05 .02 .01 "
			"0 0 0 .3 .6 .09 .01 "
			"0 0 0 .02 .95 .02 .01 "
			"0 0 0 .01 .01 .97 .01 "
			"0 0 0 0 0 0 1";

	// Connect every pair of consecutive nodes
	for (int k = 0; k + 1 < nrNodes; k++)
		graph.add(keys[k] & keys[k + 1], edgePotential);

	// Marginal of node 2 at state 0 must match the known value
	DiscreteMarginals marginals(graph);
	DiscreteFactor::shared_ptr marginal2 = marginals(keys[2].first);
	DiscreteFactor::Values assignment;
	assignment[keys[2].first] = 0;
	EXPECT_DOUBLES_EQUAL( 0.03426, (*marginal2)(assignment), 1e-4);
}
Example #6
0
int main(int argc, char **argv) {

  // We assume binary state variables
  // we have 0 == "False" and 1 == "True"
  const size_t nrStates = 2;

  // define variables
  DiscreteKey Cloudy(1, nrStates), Sprinkler(2, nrStates), Rain(3, nrStates),
      WetGrass(4, nrStates);

  // create Factor Graph of the bayes net
  DiscreteFactorGraph graph;

  // add factors
  graph.add(Cloudy, "0.5 0.5"); //P(Cloudy)
  graph.add(Cloudy & Sprinkler, "0.5 0.5 0.9 0.1"); //P(Sprinkler | Cloudy)
  graph.add(Cloudy & Rain, "0.8 0.2 0.2 0.8"); //P(Rain | Cloudy)
  // P(WetGrass | Sprinkler, Rain). FIX: the last row previously read
  // "0.001 0.99", which sums to 0.991 and is not a valid conditional row;
  // the classic sprinkler network uses P(W=0|S=1,R=1)=0.01, P(W=1|S=1,R=1)=0.99.
  graph.add(Sprinkler & Rain & WetGrass,
      "1 0 0.1 0.9 0.1 0.9 0.01 0.99"); //P(WetGrass | Sprinkler, Rain)

  // Alternatively we can also create a DiscreteBayesNet, add DiscreteConditional
  // factors and create a FactorGraph from it. (See testDiscreteBayesNet.cpp)

  // Since this is a relatively small distribution, we can as well print
  // the whole distribution..
  cout << "Distribution of Example: " << endl;
  cout << setw(11) << "Cloudy(C)" << setw(14) << "Sprinkler(S)" << setw(10)
      << "Rain(R)" << setw(14) << "WetGrass(W)" << setw(15) << "P(C,S,R,W)"
      << endl;
  // Enumerate all 16 joint assignments and evaluate the product of factors
  for (size_t a = 0; a < nrStates; a++)
    for (size_t m = 0; m < nrStates; m++)
      for (size_t h = 0; h < nrStates; h++)
        for (size_t c = 0; c < nrStates; c++) {
          DiscreteFactor::Values values;
          values[Cloudy.first] = c;
          values[Sprinkler.first] = h;
          values[Rain.first] = m;
          values[WetGrass.first] = a;
          double prodPot = graph(values);
          cout << boolalpha << setw(8) << (bool) c << setw(14)
              << (bool) h << setw(12) << (bool) m << setw(13)
              << (bool) a << setw(16) << prodPot << endl;
        }


  // "Most Probable Explanation", i.e., configuration with largest value
  DiscreteSequentialSolver solver(graph);
  DiscreteFactor::sharedValues optimalDecoding = solver.optimize();
  cout <<"\nMost Probable Explanation (MPE):" << endl;
  cout << boolalpha << "Cloudy = " << (bool)(*optimalDecoding)[Cloudy.first]
                  << "  Sprinkler = " << (bool)(*optimalDecoding)[Sprinkler.first]
                  << "  Rain = " << boolalpha << (bool)(*optimalDecoding)[Rain.first]
                  << "  WetGrass = " << (bool)(*optimalDecoding)[WetGrass.first]<< endl;


  // "Inference" We show an inference query like: probability that the Sprinkler was on;
  // given that the grass is wet i.e. P( S | W=1) =?
  cout << "\nInference Query: Probability of Sprinkler being on given Grass is Wet" << endl;

  // Method 1: we can compute the joint marginal P(S,W) and from that we can compute
  // P(S | W=1) = P(S,W=1)/P(W=1) We do this in following three steps..

  //Step1: Compute P(S,W)
  DiscreteFactorGraph jointFG;
  jointFG = *solver.jointFactorGraph(DiscreteKeys(Sprinkler & WetGrass).indices());
  DecisionTreeFactor probSW = jointFG.product();

  //Step2: Compute P(W)
  DiscreteFactor::shared_ptr probW = solver.marginalFactor(WetGrass.first);

  //Step3: Compute P(S | W=1) = P(S,W=1)/P(W=1)
  DiscreteFactor::Values values;
  values[WetGrass.first] = 1;

  //print P(S=0|W=1)
  values[Sprinkler.first] = 0;
  cout << "P(S=0|W=1) = " << probSW(values)/(*probW)(values) << endl;

  //print P(S=1|W=1)
  values[Sprinkler.first] = 1;
  cout << "P(S=1|W=1) = " << probSW(values)/(*probW)(values) << endl;

  // TODO: Method 2 : One way is to modify the factor graph to
  // incorporate the evidence node and compute the marginal
  // TODO: graph.addEvidence(Cloudy,0);

  return 0;
}