Ejemplo n.º 1
0
bfun* complement (bfun* b_initial) {
  // Complement a boolean function via Shannon expansion.
  // Does not free its argument.

  bfun* simplified = try_simplify(b_initial);
  if (simplified != b_initial) {
    // try_simplify produced a fresh function: that is the answer.
    return simplified;
  }

  // No simplification possible: expand around the best splitting variable
  // and complement each cofactor recursively.
  int split_var = best_split(simplified);

  bfun* pos = pos_co(simplified, split_var);
  bfun* neg = neg_co(simplified, split_var);

  bfun* pos_comp = complement(pos);
  bfun* neg_comp = complement(neg);

  del_bfun(pos);
  del_bfun(neg);

  // and_var modifies the passed cube_list in place.
  and_var(pos_comp, split_var);
  and_var(neg_comp, -split_var);

  // or allocates a new cube & copies the cubelists over.
  bfun* merged = or(pos_comp, neg_comp);
  del_bfun(pos_comp);
  del_bfun(neg_comp);

  return merged;
}
Ejemplo n.º 2
0
dectree_node* Dectree_class::learn_dectree(const cv::Mat& p_samples_labels, const cv::Mat& samples_labels, const cv::Mat& samples_data, std::vector<int> attr, int depth)
{
	// Recursively build a decision tree over samples_data/samples_labels.
	// p_samples_labels: labels of the parent node (used for the plurality
	// vote when this node receives no examples). attr: indices of the
	// attributes still available for splitting (taken by value on purpose,
	// so each recursion branch erases from its own copy). depth: current
	// tree depth, checked against depth_limit.
	// Returns the root of the (sub)tree that was built.

	Dectree_BST dectree; // decision tree structure
	dectree_node* dectree_root = dectree.get_root();

	// Base case 1: no more examples -> plurality vote over the parent's labels.
	if(samples_labels.empty())
	{
		dectree.insert_node(&dectree_root, "terminal", (++terminal_nodes_idx), depth, -1, plurality(p_samples_labels));
		return dectree_root;
	}
	// Base case 2: all remaining examples share the same classification.
	else if(check_classif(samples_labels))
	{
		dectree.insert_node(&dectree_root, "terminal", (++terminal_nodes_idx), depth, -1, samples_labels.at<int>(0));
		return dectree_root;
	}
	// Base case 3: no attributes left to split on -> plurality vote.
	else if(attr.empty())
	{
		dectree.insert_node(&dectree_root, "terminal", (++terminal_nodes_idx), depth, -1, plurality(samples_labels));
		return dectree_root;
	}
	// Base case 4: depth or minimum-sample limits reached (attributes and
	// samples are still available when this case is hit).
	else if(depth >= depth_limit || samples_labels.rows < min_samples)
	{
		dectree.insert_node(&dectree_root, "terminal", (++terminal_nodes_idx), depth, -1, plurality(samples_labels));
		return dectree_root;
	}
	else
	{
		// Find the attribute with the highest information gain.
		// FIX: the original code did `new dectree_split()` up front and then
		// overwrote the pointer with best_split()'s result, leaking the first
		// object on every call (and leaking best_split()'s object as well).
		// Allocate only via best_split() and release it once consumed.
		dectree_split* split = best_split(attr, samples_data, samples_labels);

		// Create a split node with the best attribute as root.
		dectree.insert_node(&dectree_root, "split", (++split_nodes_idx), depth, split->attr_name, -1);

		// Erase the attribute just used, then recurse on the negative and
		// positive partitions produced by the split.
		attr.erase(attr.begin() + split->attr_idx);
		dectree_root->f = learn_dectree(samples_labels, split->neg_attr_labels, split->neg_attr_data, attr, (depth+1));
		dectree_root->t = learn_dectree(samples_labels, split->pos_attr_labels, split->pos_attr_data, attr, (depth+1));

		// Both subtrees have copied what they need from the split descriptor;
		// free it (assumes best_split() heap-allocates, matching the original
		// `new dectree_split()` convention in this file).
		delete split;

		return dectree_root;
	}
}