Example #1
int main(void) {
	mocapy_seed((uint)5556575);

	// Number of training sequences
	int N = 500;

	// Sequence lengths
	int T = 100;

	// Gibbs sampling parameters
	int MCMC_BURN_IN = 10;

	// HMM hidden node size
	uint H_SIZE=2;
	bool init_random=false;

	CPD th0_cpd;
	th0_cpd.set_shape(2); th0_cpd.set_values(vec(0.1, 0.9));

	CPD th1_cpd;
	th1_cpd.set_shape(2,2); th1_cpd.set_values( vec(0.95, 0.05, 0.1, 0.9));

	// The target DBN (This DBN generates the data)
	Node* th0 = NodeFactory::new_discrete_node(H_SIZE, "th0", init_random, th0_cpd);
	Node* th1 = NodeFactory::new_discrete_node(H_SIZE, "th1", init_random, th1_cpd);
	Node* to0 = NodeFactory::new_kent_node("to0");

	DBN tdbn;
	tdbn.set_slices(vec(th0, to0), vec(th1, to0));
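	// set_slices lists the nodes of the first slice and of every following
	// slice; add_intra/add_inter below then add the within-slice and
	// slice-to-slice (t -> t+1) edges of the HMM.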

	tdbn.add_intra("th0", "to0");
	tdbn.add_inter("th0", "th1");
	tdbn.construct();

	// The model DBN (this DBN will be trained)
	// For mh0, get the CPD from th0 and fix parameters
	Node* mh0 = NodeFactory::new_discrete_node(H_SIZE, "mh0", init_random, CPD(), th0, true );
	Node* mh1 = NodeFactory::new_discrete_node(H_SIZE, "mh1", init_random);
	Node* mo0 = NodeFactory::new_kent_node("mo0");

	DBN mdbn;
	mdbn.set_slices(vec(mh0, mo0), vec(mh1, mo0));

	mdbn.add_intra("mh0", "mo0");
	mdbn.add_inter("mh0", "mh1");
	mdbn.construct();

	cout << "*** TARGET ***" << endl;
	cout << *th0 << endl;
	cout << *th1 << endl;
	cout << *to0 << endl;

	cout << "*** MODEL ***" << endl;
	cout << *mh0 << endl;
	cout << *mh1 << endl;
	cout << *mo0 << endl;

	vector<Sequence> seq_list;
	vector< MDArray<eMISMASK> > mismask_list;

	cout << "Generating data" << endl;

	MDArray<eMISMASK> mismask;
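	// For every one of the T slices, mark the hidden node as unobserved
	// (MOCAPY_HIDDEN) and the Kent output node as observed.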
	mismask.repeat(T, vec(MOCAPY_HIDDEN, MOCAPY_OBSERVED));

	// Generate the data
	double sum_LL(0);
	for (int i=0; i<N; i++) {
		pair<Sequence, double>  seq_ll = tdbn.sample_sequence(T);
		sum_LL += seq_ll.second;
 		seq_list.push_back(seq_ll.first);
 		mismask_list.push_back(mismask);
	}
	cout << "Average LL: " << sum_LL/N << endl;

	GibbsRandom mcmc = GibbsRandom(&mdbn);
	EMEngine em = EMEngine(&mdbn, &mcmc, &seq_list, &mismask_list);

	cout << "Starting EM loop" << endl;
	double bestLL=-1000;
	uint it_no_improvement(0);
	uint i(0);
	// Start EM loop
	while (it_no_improvement<10) {
		em.do_E_step(1, MCMC_BURN_IN, true);

		double ll = em.get_loglik();

		cout << "LL= " << ll;

		if (ll > bestLL) {
			cout << " * saving model *" << endl;
			mdbn.save("kent_hmm.dbn");
			bestLL = ll;
			it_no_improvement=0;
		}
		else { it_no_improvement++; cout << endl; }

		i++;
		em.do_M_step();
	}

	cout << "DONE" << endl;

	mdbn.load("kent_hmm.dbn");

	cout << "*** TARGET ***" << endl;
	cout << *th0 << endl;
	cout << *th1 << endl;
	cout << *to0 << endl;

	cout << "*** MODEL ***" << endl;
	cout << *mh0 << endl;
	cout << *mh1 << endl;
	cout << *mo0 << endl;


	delete th0;
	delete th1;
	delete to0;

	delete mh0;
	delete mh1;
	delete mo0;
	return EXIT_SUCCESS;
}
Example #2
void execute(DBN& dbn, task& task, const std::vector<std::string>& actions) {
    print_title("Network");
    dbn.display();

    using dbn_t = std::decay_t<DBN>;

    //Execute all the actions sequentially
    for (auto& action : actions) {
        if (action == "pretrain") {
            print_title("Pretraining");

            if (task.pretraining.samples.empty()) {
                std::cout << "dllp: error: pretrain is not possible without a pretraining input" << std::endl;
                return;
            }

            std::vector<Container> pt_samples;

            //Try to read the samples
            if (!read_samples<Three>(task.pretraining.samples, pt_samples)) {
                std::cout << "dllp: error: failed to read the pretraining samples" << std::endl;
                return;
            }

            if (task.pt_desc.denoising) {
                std::vector<Container> clean_samples;

                //Try to read the samples
                if (!read_samples<Three>(task.pretraining_clean.samples, clean_samples)) {
                    std::cout << "dllp: error: failed to read the clean samples" << std::endl;
                    return;
                }

                //Pretrain the network
                cpp::static_if<dbn_t::layers_t::is_denoising>([&](auto f) {
                    f(dbn).pretrain_denoising(pt_samples.begin(), pt_samples.end(), clean_samples.begin(), clean_samples.end(), task.pt_desc.epochs);
                });
            } else {
                //Pretrain the network
                dbn.pretrain(pt_samples.begin(), pt_samples.end(), task.pt_desc.epochs);
            }
        } else if (action == "train") {
            print_title("Training");

            if (task.training.samples.empty() || task.training.labels.empty()) {
                std::cout << "dllp: error: train is not possible without samples and labels" << std::endl;
                return;
            }

            std::vector<Container> ft_samples;
            std::vector<std::size_t> ft_labels;

            //Try to read the samples
            if (!read_samples<Three>(task.training.samples, ft_samples)) {
                std::cout << "dllp: error: failed to read the training samples" << std::endl;
                return;
            }

            //Try to read the labels
            if (!read_labels(task.training.labels, ft_labels)) {
                std::cout << "dllp: error: failed to read the training labels" << std::endl;
                return;
            }

            using last_layer = typename dbn_t::template layer_type<dbn_t::layers - 1>;

            //Train the network
            cpp::static_if<sgd_possible<last_layer>::value>([&](auto f) {
                auto ft_error = f(dbn).fine_tune(ft_samples, ft_labels, task.ft_desc.epochs);
                std::cout << "Train Classification Error:" << ft_error << std::endl;
            });

        } else if (action == "test") {
            print_title("Testing");

            if (task.testing.samples.empty() || task.testing.labels.empty()) {
                std::cout << "dllp: error: test is not possible without samples and labels" << std::endl;
                return;
            }

            std::vector<Container> test_samples;
            std::vector<std::size_t> test_labels;

            //Try to read the samples
            if (!read_samples<Three>(task.testing.samples, test_samples)) {
                std::cout << "dllp: error: failed to read the test samples" << std::endl;
                return;
            }

            //Try to read the labels
            if (!read_labels(task.testing.labels, test_labels)) {
                std::cout << "dllp: error: failed to read the test labels" << std::endl;
                return;
            }

            auto classes = dbn_t::output_size();

            etl::dyn_matrix<std::size_t, 2> conf(classes, classes, 0.0);

            std::size_t n  = test_samples.size();
            std::size_t tp = 0;

            for (std::size_t i = 0; i < test_samples.size(); ++i) {
                auto sample = test_samples[i];
                auto label  = test_labels[i];

                auto predicted = dbn.predict(sample);

                if (predicted == label) {
                    ++tp;
                }

                ++conf(label, predicted);
            }

            double test_error = (n - tp) / double(n);

            std::cout << "Error rate: " << test_error << std::endl;
            std::cout << "Accuracy: " << (1.0 - test_error) << std::endl
                      << std::endl;

            std::cout << "Results per class" << std::endl;

            double overall = 0.0;

            std::cout << "   | Accuracy | Error rate |" << std::endl;

            for (std::size_t l = 0; l < classes; ++l) {
                std::size_t total = etl::sum(conf(l));
                // Per-class error rate: misclassified samples of class l over all samples of class l
                double class_error = (total - conf(l, l)) / double(total);
                std::cout << std::setw(3) << l;
                std::cout << "|" << std::setw(10) << (1.0 - class_error) << "|" << std::setw(12) << class_error << "|" << std::endl;
                overall += class_error;
            }

            std::cout << std::endl;

            std::cout << "Overall Error rate: " << overall / classes << std::endl;
            std::cout << "Overall Accuracy: " << 1.0 - (overall / classes) << std::endl
                      << std::endl;

            std::cout << "Confusion Matrix (%)" << std::endl
                      << std::endl;

            std::cout << "    ";
            for (std::size_t l = 0; l < classes; ++l) {
                std::cout << std::setw(5) << l << " ";
            }
            std::cout << std::endl;

            for (std::size_t l = 0; l < classes; ++l) {
                std::size_t total = etl::sum(conf(l));
                std::cout << std::setw(3) << l << "|";
                for (std::size_t p = 0; p < classes; ++p) {
                    std::cout << std::setw(5) << std::setprecision(2) << 100.0 * (conf(l, p) / double(total)) << "|";
                }
                std::cout << std::endl;
            }
            std::cout << std::endl;
        } else if (action == "save") {
            print_title("Save Weights");

            dbn.store(task.w_desc.file);
            std::cout << "Weights saved" << std::endl;
        } else if (action == "load") {
            print_title("Load Weights");

            dbn.load(task.w_desc.file);
            std::cout << "Weights loaded" << std::endl;
        } else {
            std::cout << "dllp: error: Invalid action: " << action << std::endl;
        }
    }

    //TODO
}
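For reference, the per-class figures printed in the "test" branch above reduce to simple row operations on the confusion matrix. The following is a minimal standalone sketch using only the standard library; the three classes and the confusion counts are made up for illustration and are not part of the dll/etl code above.

#include <cstddef>
#include <iostream>
#include <vector>

int main() {
    // Hypothetical confusion matrix: conf[label][predicted].
    std::vector<std::vector<std::size_t>> conf = {
        {50, 2, 1},
        {4, 40, 6},
        {0, 3, 47},
    };

    double overall_error = 0.0;
    for (std::size_t l = 0; l < conf.size(); ++l) {
        // Row sum = number of test samples whose true class is l.
        std::size_t total = 0;
        for (std::size_t c : conf[l]) total += c;

        // Per-class error rate: misclassified samples of class l over all samples of class l.
        double err = (total - conf[l][l]) / double(total);
        std::cout << "class " << l << ": accuracy = " << (1.0 - err)
                  << ", error rate = " << err << std::endl;
        overall_error += err;
    }
    std::cout << "mean per-class error rate: " << overall_error / conf.size() << std::endl;
    return 0;
}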
Example #3
int main (int argc, char * argv[])
{
   
   std::streambuf *psbuf = NULL;
   std::ofstream log_stream;
   std::streambuf *pStreambuf = std::cout.rdbuf();
   
   //--------------RNG INIT STUFF
   srand((unsigned)time(0));
   long seed;
   r = gsl_rng_alloc (gsl_rng_rand48);     // pick random number generator
   seed = time (NULL) * getpid();
   gsl_rng_set (r, seed);                  // set seed
   // TESTING AREA
   
   //---------------
   DBN *dbn;
   
   if (command_option_exists(argv, argv+argc, "-n")) {
      dbn = return_network();
      std::cout << dbn << std::endl;
      std::cout << "Ready to learn" << std::endl;
      std::cout << "If you are using visualization:" << std::endl;
      std::cout << "'V': pauses visualization" << std::endl;
      std::cout << "<SPACE>: pauses learning" << std::endl;
      std::cout << "'+'/'-' increases/decreases learning rate" << std::endl;
      std::cout << "'['/']' increases/decreases output threshold" << std::endl;
      std::cout << "'L' stops learning for layer (skips to next if any)" << std::endl;
      std::cout << "Current visualization is the features displayed on top with the plot of the reconstruction cost in the box (gl text not supported yet)" << std::endl;
      std::cout << "Press <ENTER> to start learning: ";
      std::cin.get();
   }
   else if (command_option_exists(argv, argv+argc, "-l")) {
      std::string filename = get_command_line(argv, argv+argc, "-l");
      MLP mlp = load_MLP(filename);
      dbn = new DBN(mlp);
      dbn->data_layers.clear();
      dbn->view();
      exit(EXIT_SUCCESS);
   }
   else if (command_option_exists(argv, argv+argc, "-f")) {
      if (argc != 3) {
         print_usage();
         exit(EXIT_FAILURE);
      }
      //Initialization, time, logfile stuff, etc.
      time_t t = time(0);   // get time now
      struct tm * the_time = localtime( & t );
      mkdir(out_path.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);
      out_path += convert_to_string(*the_time) + "/";
      mkdir((out_path).c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);
      
      std::string filename = get_command_line(argv, argv+argc, "-f");
      dbn = return_aod_network(filename, log_stream, psbuf);
      
   }
   else if (command_option_exists(argv, argv+argc, "-F")) {
      
      std::string filename = get_command_line(argv, argv+argc, "-F");
      MLP mlp = load_MLP(filename); 
      Autoencoder ae(mlp);
      ae.name = (mlp).name + "fine_tuning";
      Gradient_Descent gd(0);
      gd.teachAE(ae);
      MLP ae_mlp;
      save(ae);
      exit(EXIT_SUCCESS);
   }
   else if (command_option_exists(argv, argv+argc, "-stack")) {
      std::string filename = get_command_line(argv, argv+argc, "-stack");
      dbn = load_and_stack(filename, log_stream, psbuf);
   }
   else {
      print_usage();
      exit(EXIT_SUCCESS);
   }
   ContrastiveDivergence cd(1000);
   dbn->learn(cd);
   std::cout.rdbuf(pStreambuf);
   log_stream.close();
   return EXIT_SUCCESS;
}
Example #4
int main(void) {
	mocapy_seed((uint)1);

	// Number of training sequences
	int N = 1000;

	// Allocate storage in each node.
	int max_num_dat_points = N;

	// Sequence lengths
	int T = 1;

	// Gibbs sampling parameters
	int MCMC_BURN_IN = 10;

	// HMM hidden and observed node sizes
	uint H_SIZE=1;
	uint O_SIZE=20;
	bool init_random=true;

	// The target DBN (This DBN generates the data)
	Node* th0 = NodeFactory::new_discrete_node(H_SIZE, "th0", init_random);
	Node* th1 = NodeFactory::new_discrete_node(H_SIZE, "th1", init_random);

	Node* to0 = NodeFactory::new_GDM_node("to0", O_SIZE , max_num_dat_points);

	DBN tdbn;
	tdbn.set_slices(vec(th0, to0), vec(th1, to0));

	tdbn.add_intra("th0", "to0");
	tdbn.add_inter("th0", "th1");

	cout << "model has been defined" << endl;
	tdbn.construct();

	// The model DBN (this DBN will be trained)
	// For mh0, get the CPD from th0 and fix parameters
	Node* mh0 = NodeFactory::new_discrete_node(H_SIZE, "mh0", init_random, CPD(), th0, true );
	Node* mh1 = NodeFactory::new_discrete_node(H_SIZE, "mh1", init_random);
	Node* mo0 = NodeFactory::new_GDM_node("mo0",O_SIZE, max_num_dat_points);

	DBN mdbn;
	mdbn.set_slices(vec(mh0, mo0), vec(mh1, mo0));

	mdbn.add_intra("mh0", "mo0");
	mdbn.add_inter("mh0", "mh1");
	mdbn.construct();

	cout << "*** TARGET ***" << endl;
	cout << *th0 << endl;
	cout << *th1 << endl;
	cout << *to0 << endl;

	cout << "*** MODEL ***" << endl;
	cout << *mh0 << endl;
	cout << *mh1 << endl;
	cout << *mo0 << endl;

	vector<Sequence> seq_list;
	vector< MDArray<eMISMASK> > mismask_list;

	cout << "Generating data" << endl;

	MDArray<eMISMASK> mismask;
	mismask.repeat(T, vec(MOCAPY_HIDDEN, MOCAPY_OBSERVED));

	// Setting parameters
	CPD pars;
	double s = 4;
	pars.set_shape(H_SIZE,2*(O_SIZE-1));
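	// Fill the parameter vector with 2*(O_SIZE-1) values to match the CPD
	// shape set above: the first O_SIZE-1 entries all equal s, followed by a
	// ramp from s*(O_SIZE-1) down to s in steps of s.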
	
	vector<double> parvec;
	for(uint i = 0;i < O_SIZE-1;i++){
	  parvec.push_back(s);
	}
	for(double i =  s*(O_SIZE-1);i >= 1;i=i-s){
	  parvec.push_back(i);
	}
	
/*
	for(uint i = 0;i < O_SIZE-1;i++){
	  parvec.push_back(s);
	}
	for(double i = O_SIZE-1;i >= 1;i=i-s){
	  parvec.push_back(s*i);
	}
	
					*/
	pars.set_values(parvec);
	((GDMNode*)to0)->get_densities()->set_parameters(pars);
	((GDMNode*)to0)->get_densities()->set_SumOfCounts(4000);
	pair<Sequence, double>  seq_ll;	
	// Generate the data
	double sum_LL(0);
	for (int i=0; i<N; i++) {
	  seq_ll = tdbn.sample_sequence(T);
	  sum_LL += seq_ll.second;
	  seq_list.push_back(seq_ll.first);
	  mismask_list.push_back(mismask);
	}
	cout << "Average LL: " << sum_LL/N << endl;

	GibbsRandom mcmc = GibbsRandom(&mdbn);
	EMEngine em = EMEngine(&mdbn, &mcmc, &seq_list, &mismask_list);

	cout << "Starting EM loop" << endl;
	double bestLL=-1000;
	uint it_no_improvement(0);
	uint i(0);
	bool l = false;
	// Start EM loop

	while (it_no_improvement<100) {
		em.do_E_step(1, MCMC_BURN_IN, true);
		double ll = em.get_loglik();
		cout << "LL= " << ll;

		if (ll > bestLL) {
		  cout << " * saving model *" << endl;
		  cout << *mo0 << endl;	
		  l = true;
		  //mdbn.save("discrete_hmm.dbn");
		  bestLL = ll;
		  it_no_improvement=0;
		}
		else { 
		  it_no_improvement++; 
		  cout << endl;
		}
		i++;
		em.do_M_step();
		if (l){
		  l=false;
		}
	}

	cout << "DONE" << endl;

	//	mdbn.load("discrete_hmm.dbn");

	cout << "*** TARGET ***" << endl;
	cout << *th0 << endl;
	cout << *th1 << endl;
	cout << *to0 << endl;

	cout << "*** MODEL ***" << endl;
	cout << *mh0 << endl;
	cout << *mh1 << endl;
	cout << *mo0 << endl;


	//	em.do_E_step(1, MCMC_BURN_IN, true);
	//	((GDMNode*)mo0)->get_densities()->set_parameters(pars);
	//	double ll = em.get_loglik();
	//	cout << "LL= " << ll;
	//	cout << *mo0 << endl;

	delete th0;
	delete th1;
	delete to0;

	delete mh0;
	delete mh1;
	delete mo0;
	return EXIT_SUCCESS;
}
Example #5
DBN* KjaerulfsBNetModel()
{
    DBN *net;
    net = new DBN();

    textcolor(WHITE);

    net->AddNode(discrete^"node0-0 node1-0 node2-0 node3-0 node4-0 node5-0 node6-0 node7-0", "true false");
    printf("\n net->AddNode(discrete^\"node0-0 node1-0 node2-0 \n\t node3-0 node4-0 node5-0 node6-0 node7-0\", \"true false\");");
    textcolor(LIGHTGREEN);

    printf("\t\t\tAdding of nodes is in process....");
    _sleep(2000);
    textcolor(WHITE);

    net->AddNode(discrete^"node0-1 node1-1 node2-1 node3-1 node4-1 node5-1 node6-1 node7-1", "true false");
    printf("\n net->AddNode(discrete^\"node0-1 node1-1 node2-1 \n\t node3-1 node4-1 node5-1 node6-1 node7-1\", \"true false\");");
    _sleep(1000);

    textcolor(LIGHTGREEN);
    printf("\n ......All nodes are added....\n");
    getch();

    // arcs
    textcolor(WHITE);

    net->AddArc("node0-0", "node1-0 node2-0");
    printf("\n net->AddArc(\"node0-0\", \"node1-0 node2-0\");");
    textcolor(LIGHTGREEN);
    printf("\t\t\t\t\t\tAdding of arcs is in process....");
    _sleep(2000);

    textcolor(WHITE);

    net->AddArc("node1-0", "node2-0");
    printf("\n net->AddArc(\"node1-0\", \"node2-0\");");

    net->AddArc("node2-0", "node3-0");
    printf("\n net->AddArc(\"node2-0\", \"node3-0\");");

    net->AddArc("node3-0", "node4-0 node5-0");
    printf("\n net->AddArc(\"node3-0\", \"node4-0 node5-0\");");

    net->AddArc("node4-0 node5-0", "node6-0");
    printf("\n net->AddArc(\"node4-0 node5-0\", \"node6-0\");");

    net->AddArc("node6-0", "node7-0");
    printf("\n net->AddArc(\"node6-0\", \"node7-0\");");

    net->AddArc("node0-1", "node1-1 node2-1");
    printf("\n net->AddArc(\"node0-1\", \"node1-1 node2-1\");");

    net->AddArc("node1-1", "node2-1");
    printf("\n net->AddArc(\"node1-1\", \"node2-1\");");

    net->AddArc("node2-1", "node3-1");
    printf("\n net->AddArc(\"node2-1\", \"node3-1\");");

    net->AddArc("node3-1", "node4-1 node5-1");
    printf("\n net->AddArc(\"node3-1\", \"node4-1 node5-1\");");

    net->AddArc("node4-1 node5-1", "node6-1");
    printf("\n net->AddArc(\"node4-1 node5-1\", \"node6-1\");");

    net->AddArc("node6-1", "node7-1");
    printf("\n net->AddArc(\"node6-1\", \"node7-1\");");

    net->AddArc("node0-0", "node0-1");
    printf("\n net->AddArc(\"node0-0\", \"node0-1\");");

    net->AddArc("node3-0", "node3-1");
    printf("\n net->AddArc(\"node3-0\", \"node3-1\");");

    net->AddArc("node7-0", "node7-1");
    printf("\n net->AddArc(\"node7-0\", \"node7-1\");");
    textcolor(LIGHTGREEN);
    printf("\n ......All arcs are added....\n");
    getch();
    // distributions
    textcolor(WHITE);

    net->SetPTabular("node0-0^false node0-0^true", "0.5 0.5");
    printf("\n net->SetPTabular(\"node0-0^false node0-0^true\", \"0.5 0.5\");");
    textcolor(LIGHTGREEN);
    printf("\t\t\t\tAdding of distributions is in process....");
    _sleep(2000);

    textcolor(WHITE);
    net->SetPTabular("node1-0^false node1-0^true", "0.98 0.02", "node0-0^false");
    printf("\n net->SetPTabular(\"node1-0^false node1-0^true\", \"0.98 0.02\",  \n\t \"node0-0^false\");");

    net->SetPTabular("node1-0^false node1-0^true", "0.5 0.5", "node0-0^true");
    printf("\n net->SetPTabular(\"node1-0^false node1-0^true\", \"0.5 0.5\",  \n\t \"node0-0^true\");");

    net->SetPTabular("node2-0^false node2-0^true", "1.0 0.0", "node0-0^false node1-0^false");
    printf("\n net->SetPTabular(\"node2-0^false node2-0^true\", \"1.0 0.0\",  \n\t \"node0-0^false node1-0^false\");");

    net->SetPTabular("node2-0^false node2-0^true", "0.0 1.0", "node0-0^false node1-0^true");
    printf("\n net->SetPTabular(\"node2-0^false node2-0^true\", \"0.0 1.0\",  \n\t \"node0-0^false node1-0^true\");");

    net->SetPTabular("node2-0^false node2-0^true", "0.0 1.0", "node0-0^true node1-0^false");
    printf("\n net->SetPTabular(\"node2-0^false node2-0^true\", \"0.0 1.0\",  \n\t \"node0-0^true node1-0^false\");");

    net->SetPTabular("node2-0^false node2-0^true", "0.5 0.5", "node0-0^true node1-0^true");
    printf("\n net->SetPTabular(\"node2-0^false node2-0^true\", \"0.5 0.5\",  \n\t \"node0-0^true node1-0^true\");");


    net->SetPTabular("node3-0^false node3-0^true", "0.99 0.01", "node2-0^false");
    printf("\n net->SetPTabular(\"node3-0^false node3-0^true\", \"0.99 0.01\",  \n\t \"node2-0^false\");");

    net->SetPTabular("node3-0^false node3-0^true", "0.8 0.2", "node2-0^true");
    printf("\n net->SetPTabular(\"node3-0^false node3-0^true\", \"0.8 0.2\",  \n\t \"node2-0^true\");");

    net->SetPTabular("node4-0^false node4-0^true", "0.99 0.01", "node3-0^false");
    printf("\n net->SetPTabular(\"node4-0^false node4-0^true\", \"0.99 0.01\",  \n\t \"node3-0^false\");");

    net->SetPTabular("node4-0^false node4-0^true", "0.92 0.08", "node3-0^true");
    printf("\n net->SetPTabular(\"node4-0^false node4-0^true\", \"0.92 0.08\",  \n\t \"node3-0^true\");");

    net->SetPTabular("node5-0^false node5-0^true", "0.79 0.21", "node3-0^false");
    printf("\n net->SetPTabular(\"node5-0^false node5-0^true\", \"0.79 0.21\",  \n\t \"node3-0^false\");");

    net->SetPTabular("node5-0^false node5-0^true", "0.65 0.35", "node3-0^true");
    printf("\n net->SetPTabular(\"node5-0^false node5-0^true\", \"0.65 0.35\",  \n\t \"node3-0^true\");");


    net->SetPTabular("node6-0^false node6-0^true", "0.9 0.1", "node4-0^false node5-0^false");
    printf("\n net->SetPTabular(\"node6-0^false node6-0^true\", \"0.9 0.1\",  \n\t \"node4-0^false node5-0^false\");");

    net->SetPTabular("node6-0^false node6-0^true", "0.37 0.63", "node4-0^false node5-0^true");
    printf("\n net->SetPTabular(\"node6-0^false node6-0^true\", \"0.37 0.63\",  \n\t \"node4-0^false node5-0^true\");");

    net->SetPTabular("node6-0^false node6-0^true", "0.21 0.79", "node4-0^true node5-0^false");
    printf("\n net->SetPTabular(\"node6-0^false node6-0^true\", \"0.21 0.79\",  \n\t \"node4-0^true node5-0^false\");");

    net->SetPTabular("node6-0^false node6-0^true", "0.2 0.8", "node4-0^true node5-0^true");
    printf("\n net->SetPTabular(\"node6-0^false node6-0^true\", \"0.2 0.8\",  \n\t \"node4-0^true node5-0^true\");");


    net->SetPTabular("node7-0^false node7-0^true", "0.91 0.09", "node6-0^false");
    printf("\n net->SetPTabular(\"node7-0^false node7-0^true\", \"0.91 0.09\",  \n\t \"node6-0^false\");");

    net->SetPTabular("node7-0^false node7-0^true", "0.22 0.78", "node6-0^true");
    printf("\n net->SetPTabular(\"node7-0^false node7-0^true\", \"0.22 0.78\",  \n\t \"node6-0^true\");");

    net->SetPTabular("node0-1^false node0-1^true", "0.45 0.55", "node0-0^false");
    printf("\n net->SetPTabular(\"node0-1^false node0-1^true\", \"0.45 0.55\",  \n\t \"node0-0^false\");");

    net->SetPTabular("node0-1^false node0-1^true", "0.24 0.76", "node0-0^true");
    printf("\n net->SetPTabular(\"node0-1^false node0-1^true\", \"0.24 0.76\",  \n\t \"node0-0^true\");");

    net->SetPTabular("node1-1^false node1-1^true", "0.51 0.49", "node0-1^false");
    printf("\n net->SetPTabular(\"node1-1^false node1-1^true\", \"0.51 0.49\",  \n\t \"node0-1^false\");");

    net->SetPTabular("node1-1^false node1-1^true", "0.29 0.71", "node0-1^true");
    printf("\n net->SetPTabular(\"node1-1^false node1-1^true\", \"0.29 0.71\",  \n\t \"node0-1^true\");");

    net->SetPTabular("node2-1^false node2-1^true", "0.98 0.02", "node0-1^false node1-1^false");
    printf("\n net->SetPTabular(\"node2-1^false node2-1^true\", \"0.98 0.02\",  \n\t \"node0-1^false node1-1^false\");");

    net->SetPTabular("node2-1^false node2-1^true", "0.4 0.6", "node0-1^false node1-1^true");
    printf("\n net->SetPTabular(\"node2-1^false node2-1^true\", \"0.4 0.6\",  \n\t \"node0-1^false node1-1^true\");");

    net->SetPTabular("node2-1^false node2-1^true", "0.2 0.8", "node0-1^true node1-1^false");
    printf("\n net->SetPTabular(\"node2-1^false node2-1^true\", \"0.2 0.8\",  \n\t \"node0-1^true node1-1^false\");");

    net->SetPTabular("node2-1^false node2-1^true", "0.5 0.5", "node0-1^true node1-1^true");
    printf("\n net->SetPTabular(\"node2-1^false node2-1^true\", \"0.5 0.5\",  \n\t \"node0-1^true node1-1^true\");");

    net->SetPTabular("node3-1^false node3-1^true", "0.36 0.64", "node3-0^false node2-1^false");
    printf("\n net->SetPTabular(\"node3-1^false node3-1^true\", \"0.36 0.64\",  \n\t \"node3-0^false node2-1^false\");");

    net->SetPTabular("node3-1^false node3-1^true", "0.23 0.77", "node3-0^false node2-1^true");
    printf("\n net->SetPTabular(\"node3-1^false node3-1^true\", \"0.23 0.77\",  \n\t \"node3-0^false node2-1^true\");");

    net->SetPTabular("node3-1^false node3-1^true", "0.78 0.22", "node3-0^true node2-1^false");
    printf("\n net->SetPTabular(\"node3-1^false node3-1^true\", \"0.78 0.22\",  \n\t \"node3-0^true node2-1^false\");");

    net->SetPTabular("node3-1^false node3-1^true", "0.11 0.89", "node3-0^true node2-1^true");
    printf("\n net->SetPTabular(\"node3-1^false node3-1^true\", \"0.11 0.89\",  \n\t \"node3-0^true node2-1^true\");");

    net->SetPTabular("node4-1^false node4-1^true", "0.955 0.045", "node3-1^false");
    printf("\n net->SetPTabular(\"node4-1^false node4-1^true\", \"0.955 0.045\",  \n\t \"node3-1^false\");");

    net->SetPTabular("node4-1^false node4-1^true", "0.42 0.58", "node3-1^true");
    printf("\n net->SetPTabular(\"node4-1^false node4-1^true\", \"0.42 0.58\",  \n\t \"node3-1^true\");");

    net->SetPTabular("node5-1^false node5-1^true", "0.57 0.43", "node3-1^false");
    printf("\n net->SetPTabular(\"node5-1^false node5-1^true\", \"0.57 0.43\",  \n\t \"node3-1^false\");");

    net->SetPTabular("node5-1^false node5-1^true", "0.09 0.91", "node3-1^true");
    printf("\n net->SetPTabular(\"node5-1^false node5-1^true\", \"0.09 0.91\",  \n\t \"node3-1^true\");");

    net->SetPTabular("node6-1^false node6-1^true", "0.91 0.09", "node4-1^false node5-1^false");
    printf("\n net->SetPTabular(\"node6-1^false node6-1^true\", \"0.91 0.09\",  \n\t \"node4-1^false node5-1^false\");");

    net->SetPTabular("node6-1^false node6-1^true", "0.13 0.87", "node4-1^false node5-1^true");
    printf("\n net->SetPTabular(\"node6-1^false node6-1^true\", \"0.13 0.87\",  \n\t \"node4-1^false node5-1^true\");");

    net->SetPTabular("node6-1^false node6-1^true", "0.12 0.88", "node4-1^true node5-1^false");
    printf("\n net->SetPTabular(\"node6-1^false node6-1^true\", \"0.12 0.88\",  \n\t \"node4-1^true node5-1^false\");");

    net->SetPTabular("node6-1^false node6-1^true", "0.01 0.99", "node4-1^true node5-1^true");
    printf("\n net->SetPTabular(\"node6-1^false node6-1^true\", \"0.01 0.99\",  \n\t \"node4-1^true node5-1^true\");");

    net->SetPTabular("node7-1^false node7-1^true", "0.39 0.61", "node6-1^false node7-0^false");
    printf("\n net->SetPTabular(\"node7-1^false node7-1^true\", \"0.39 0.61\",  \n\t \"node6-1^false node7-0^false\");");

    net->SetPTabular("node7-1^false node7-1^true", "0.37 0.63", "node6-1^false node7-0^true");
    printf("\n net->SetPTabular(\"node7-1^false node7-1^true\", \"0.37 0.63\",  \n\t \"node6-1^false node7-0^true\");");

    net->SetPTabular("node7-1^false node7-1^true", "0.255 0.745", "node6-1^true node7-0^false");
    printf("\n net->SetPTabular(\"node7-1^false node7-1^true\", \"0.255 0.745\",  \n\t \"node6-1^true node7-0^false\");");

    net->SetPTabular("node7-1^false node7-1^true", "0.1 0.9", "node6-1^true node7-0^true");
    printf("\n net->SetPTabular(\"node7-1^false node7-1^true\", \"0.1 0.9\",  \n\t \"node6-1^true node7-0^true\");");

    _sleep(1000);
    textcolor(LIGHTGREEN);
    printf("\n ......All distributions are added....\n");
    textcolor(WHITE);

    return net;

}
Example #6
DBN* DBNModel()
{

    DBN *net;
    net = new DBN();

    textcolor(WHITE);

    net->AddNode(discrete^"Rain-0 Umbrella-0", "True False");
    printf("\n net->AddNode(discrete^\"Rain-0 Umbrella-0\", \"True False\");");
    textcolor(LIGHTGREEN);

    printf("\t\t\t\t\tAdding of nodes is in process....");
    _sleep(2000);

    textcolor(WHITE);
    net->AddNode(discrete^"Rain-1 Umbrella-1", "True False");
    printf("\n net->AddNode(discrete^\"Rain-1 Umbrella-1\", \"True False\");");
    textcolor(LIGHTGREEN);
    printf("\n ......All nodes are added....\n");
    getch();

    // arcs
    textcolor(WHITE);
    net->AddArc("Rain-0", "Umbrella-0");
    printf("\n net->AddArc(\"Rain-0\", \"Umbrella-0\");");
    textcolor(LIGHTGREEN);
    printf("\t\t\t\t\t\t\t\tAdding of arcs is in process....");
    _sleep(2000);

    textcolor(WHITE);
    net->AddArc("Rain-0", "Rain-1");
    printf("\n net->AddArc(\"Rain-0\", \"Rain-1\");");
    _sleep(1000);

    net->AddArc("Rain-1", "Umbrella-1");
    printf("\n net->AddArc(\"Rain-1\", \"Umbrella-1\");");
    textcolor(LIGHTGREEN);
    printf("\n ......All arcs are added....\n");
    getch();

    // distributions
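    // Each SetPTabular call fills one row of a conditional probability table:
    // the first argument lists the child states, the second the corresponding
    // probabilities, and the optional third argument the parent configuration
    // that row is conditioned on.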
    textcolor(WHITE);
    net->SetPTabular("Rain-0^True Rain-0^False", "0.5 0.5");
    printf("\n net->SetPTabular(\"Rain-0^True Rain-0^False\", \"0.5 0.5\");");
    textcolor(LIGHTGREEN);
    printf("\t\t\t\t\tAdding of distributions is in process....");
    _sleep(2000);

    textcolor(WHITE);
    net->SetPTabular("Umbrella-0^True Umbrella-0^False", "0.9 0.1", "Rain-0^True");
    printf("\n net->SetPTabular(\"Umbrella-0^True Umbrella-0^False\", \"0.9 0.1\", \"Rain-0^True\");");
    _sleep(1000);

    net->SetPTabular("Umbrella-0^True Umbrella-0^False", "0.2 0.8", "Rain-0^False");
    printf("\n net->SetPTabular(\"Umbrella-0^True Umbrella-0^False\", \"0.2 0.8\", \"Rain-0^False\");");
    _sleep(1000);

    net->SetPTabular("Rain-1^True Rain-1^False", "0.7 0.3", "Rain-0^True");
    printf("\n net->SetPTabular(\"Rain-1^True Rain-1^False\", \"0.7 0.3\", \"Rain-0^True\");");
    _sleep(1000);

    net->SetPTabular("Rain-1^True Rain-1^False", "0.3 0.7", "Rain-0^False");
    printf("\n net->SetPTabular(\"Rain-1^True Rain-1^False\", \"0.3 0.7\", \"Rain-0^False\");");
    _sleep(1000);

    net->SetPTabular("Umbrella-1^True Umbrella-1^False", "0.9 0.1", "Rain-1^True");
    printf("\n net->SetPTabular(\"Umbrella-1^True Umbrella-1^False\", \"0.9 0.1\", \"Rain-1^True\");");
    _sleep(1000);

    net->SetPTabular("Umbrella-1^True Umbrella-1^False", "0.2 0.8", "Rain-1^False");
    printf("\n net->SetPTabular(\"Umbrella-1^True Umbrella-1^False\", \"0.2 0.8\", \"Rain-1^False\");");
    textcolor(LIGHTGREEN);
    printf("\n ......All distributions are added....\n");
    getch();
    textcolor(WHITE);

    return net;
}