Example #1
bool SimpleNN::predict(const Mat_<double> &test_X, Mat_<double> &result, string &err_msg){
    
    Mat_<double> input_data = test_X.reshape(0, test_X.rows*test_X.cols); // flatten test_X into a column vector
    
    if (input_data.rows != this->structure[0]){
        err_msg = "wrong input size";
        return false;
    }
    
    // copy the input into the input layer, starting at row 1 (row 0 is reserved, presumably for the bias unit)
    for (int row_index = 1; row_index < this->layers[0].rows; ++row_index){
        this->layers[0](row_index, 0) = input_data(row_index-1, 0);
    }

    int num_layers = (int) this->layers.size();
    
    // propagate through the hidden layers, writing each layer's activations from row 1 onward
    for (int layer_id = 0; layer_id < num_layers - 2; ++layer_id){
        Mat_<double> product = tanh(this->weights[layer_id]*this->layers[layer_id]);
        
        for (int row_index = 1; row_index < this->layers[layer_id+1].rows; ++row_index){
            this->layers[layer_id+1](row_index, 0) = product(row_index-1, 0);
        }
    }
    
    // compute the output layer
    {
        int layer_id = num_layers - 2;
        this->layers[layer_id + 1] = tanh(this->weights[layer_id] * this->layers[layer_id]);
    }
    
    result = this->layers[num_layers - 1]; // return the last layer (the output layer)
    cout << "result:\n" << result << endl;
    
    err_msg = "";
    
    return true;
}
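A minimal calling sketch follows, assuming the same OpenCV headers and using-directives as the snippet above and a trained SimpleNN instance named net; the constructor and training step are hypothetical and not shown here.

// Hypothetical caller: `net` is assumed to be an already-trained SimpleNN
// whose input layer matches test_X.rows * test_X.cols.
Mat_<double> test_X = (Mat_<double>(2, 1) << 0.5, -0.25); // one sample with 2 features
Mat_<double> result;
string err_msg;
if (net.predict(test_X, result, err_msg))
    cout << "prediction:\n" << result << endl;
else
    cerr << "predict failed: " << err_msg << endl;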
Example #2
int BaseDecisionTree::fit(Mat_<double> _X,
                          Mat_<double> _y,
                          Mat_<double> sample_weight)
{
    // Validation
    if (_X.rows == 0 || _X.cols == 0)
        return 1;

    // Determine the data dimensions (n_samples, n_features)
    _n_samples = _X.rows;
    _n_features = _X.cols;

    // Reshape y to a column vector of shape [n_samples, 1]
    _y = _y.reshape(1, _y.total());

    // Validation
    if (_y.rows != _n_samples)
        return 2;

    // Expand class_weight into per-sample weights
    Mat expanded_class_weight(0, 0, CV_64F);
    if (_class_weight.total() != 0)
        expanded_class_weight = compute_sample_weight(_class_weight, _y);

    // Apply defaults and validate the hyper-parameters
    if (_max_depth <= 0)
        _max_depth = static_cast<int>(pow(2, 31) - 1);
    if (_max_leaf_nodes <= 0)
        _max_leaf_nodes = -1;
    if (_max_features <= 0)
        _max_features = _n_features;
    if (_max_leaf_nodes > -1 && _max_leaf_nodes < 2)
        return 3;
    if (_min_samples_split <= 0)
        return 4;
    if (_min_samples_leaf <= 0)
        return 5;
    if (_min_weight_fraction_leaf < 0 || _min_weight_fraction_leaf > 0.5)
        return 6;

    // Fold the expanded class weights into the samples' weight
    if (expanded_class_weight.total())
    {
        if (sample_weight.total())
        {
            for (int i = 0; i < (int)sample_weight.total(); i++)
            {
                sample_weight.at<double>(i, 0) = sample_weight.at<double>(i, 0) *
                                                 expanded_class_weight.at<double>(i, 0);
            }
        }
        else
        {
            sample_weight = expanded_class_weight;
        }
    }

    // Scale min_weight_fraction_leaf by the total sample weight
    if (_min_weight_fraction_leaf != 0.)
        _min_weight_fraction_leaf = _min_weight_fraction_leaf * cv::sum(sample_weight)[0];
    else
        _min_weight_fraction_leaf = 0.;

    // Set min_samples_split
    _min_samples_split = max(_min_samples_split, 2 * _min_samples_leaf);

    return 0;
}
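A minimal calling sketch, again assuming the same headers and using-directives and an already-configured BaseDecisionTree instance named tree; its construction and hyper-parameter setup are hypothetical and not shown here. fit returns a non-zero code (1 through 6) when validation fails, and 0 otherwise.

// Hypothetical caller: `tree` is assumed to be a configured BaseDecisionTree.
Mat_<double> X = (Mat_<double>(4, 2) << 0, 0,  0, 1,  1, 0,  1, 1); // 4 samples, 2 features
Mat_<double> y = (Mat_<double>(4, 1) << 0, 1, 1, 0);                // binary labels
Mat_<double> sample_weight = Mat_<double>::ones(4, 1);              // uniform weights
int status = tree.fit(X, y, sample_weight);
if (status != 0)
    cerr << "fit rejected its input, error code " << status << endl;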