int main(int argc, char **argv) { init_shogun(&print_message, &print_message, &print_message); /* create example tree */ CModelSelectionParameters* tree=create_param_tree(); tree->print(); SG_SPRINT("----------------------------------\n"); /* build combinations of parameter trees */ DynArray<CParameterCombination*> combinations; tree->get_combinations(combinations); apply_parameter_tree(combinations); /* print and directly delete them all */ for (index_t i=0; i<combinations.get_num_elements(); ++i) combinations[i]->destroy(true, true); /* delete example tree (after processing of combinations because CSGObject * (namely the kernel) of the tree is SG_UNREF'ed (and not REF'ed anywhere * else) */ tree->destroy(); exit_shogun(); return 0; }
/** Applies every parameter combination in the given list to a LibSVM
 * instance, trains it on a small toy dataset and prints the SVM outputs on
 * the training examples.
 *
 * @param combinations list of parameter combinations to apply; the list and
 *        its elements are NOT deleted here — the caller keeps ownership
 */
void apply_parameter_tree(DynArray<CParameterCombination*>& combinations)
{
	/* create some data: a 2x3 matrix filled with 0..5; ownership of the
	 * buffer is transferred to the feature object below */
	float64_t* matrix=new float64_t[6];
	for (index_t i=0; i<6; i++)
		matrix[i]=i;

	/* create three 2-dimensional vectors
	 * to avoid deleting these, REF now and UNREF when finished */
	CSimpleFeatures<float64_t>* features=new CSimpleFeatures<float64_t> ();
	features->set_feature_matrix(matrix, 2, 3);
	SG_REF(features);

	/* create three labels, will be handed to svm and automaticall deleted */
	CLabels* labels=new CLabels(3);
	labels->set_label(0, -1);
	labels->set_label(1, +1);
	labels->set_label(2, -1);

	/* create libsvm with C=10 and train */
	CLibSVM* svm=new CLibSVM();
	svm->set_labels(labels);

	for (index_t i=0; i<combinations.get_num_elements(); ++i)
	{
		SG_SPRINT("applying:\n");
		combinations[i]->print();

		/* write the current combination's values into the SVM's
		 * registered parameters */
		CParameterCombination* current_combination=combinations[i];
		Parameter* current_parameters=svm->m_parameters;
		current_combination->apply_to_parameter(current_parameters);

		/* get kernel to set features, get_kernel SG_REF's the kernel */
		CKernel* kernel=svm->get_kernel();
		kernel->init(features, features);

		svm->train();

		/* classify on training examples; fix: use a distinct index 'j' so
		 * the outer combination-loop counter 'i' is no longer shadowed */
		for (index_t j=0; j<3; j++)
			SG_SPRINT("output[%d]=%f\n", j, svm->apply(j));

		/* unset features and SG_UNREF kernel */
		kernel->cleanup();
		SG_UNREF(kernel);

		SG_SPRINT("----------------\n\n");
	}

	/* free up memory */
	SG_UNREF(features);
	SG_UNREF(svm);
}
/** Computes the out-of-bag (OOB) error of the trained ensemble: each bag is
 * applied only to the vectors it was NOT trained on, the per-bag outputs are
 * combined with m_combination_rule, and the combined predictions are
 * evaluated against the ground-truth labels restricted to the OOB vectors.
 *
 * @param eval evaluation measure applied to the combined OOB predictions
 * @return evaluation result on the out-of-bag samples
 */
float64_t CBaggingMachine::get_oob_error(CEvaluation* eval) const
{
	REQUIRE(m_combination_rule != NULL, "Combination rule is not set!");
	REQUIRE(m_bags->get_num_elements() > 0, "BaggingMachine is not trained!");

	/* one column per bag, one row per feature vector; entries for in-bag
	 * vectors stay at their initial value (0 for regression, NAN else) */
	SGMatrix<float64_t> output(m_features->get_num_vectors(),
			m_bags->get_num_elements());
	if (m_labels->get_label_type() == LT_REGRESSION)
		output.zero();
	else
		output.set_const(NAN);

	/* TODO: add parallel support of applying the OOBs
	   only possible when add_subset is thread-safe
	#pragma omp parallel for num_threads(parallel->get_num_threads())
	*/
	for (index_t i = 0; i < m_bags->get_num_elements(); i++)
	{
		CMachine* m = dynamic_cast<CMachine*>(m_bags->get_element(i));
		CDynamicArray<index_t>* current_oob
			= dynamic_cast<CDynamicArray<index_t>*>(m_oob_indices->get_element(i));

		/* non-owning view on the OOB indices of bag i */
		SGVector<index_t> oob(current_oob->get_array(),
				current_oob->get_num_elements(), false);
		m_features->add_subset(oob);

		CLabels* l = m->apply(m_features);
		SGVector<float64_t> lv = l->get_values();

		/* assign the values in the matrix (NAN) that are in-bag! */
		for (index_t j = 0; j < oob.vlen; j++)
			output(oob[j], i) = lv[j];

		m_features->remove_subset();
		SG_UNREF(current_oob);
		SG_UNREF(m);
		SG_UNREF(l);
	}

	/* collect the indices of vectors that are OOB for at least one bag */
	DynArray<index_t> idx;
	for (index_t i = 0; i < m_features->get_num_vectors(); i++)
	{
		if (m_all_oob_idx[i])
			idx.push_back(i);
	}

	SGVector<float64_t> combined = m_combination_rule->combine(output);
	CLabels* predicted = NULL;
	switch (m_labels->get_label_type())
	{
		case LT_BINARY:
			predicted = new CBinaryLabels(combined);
			break;

		case LT_MULTICLASS:
			predicted = new CMulticlassLabels(combined);
			break;

		case LT_REGRESSION:
			predicted = new CRegressionLabels(combined);
			break;

		default:
			SG_ERROR("Unsupported label type\n");
	}

	m_labels->add_subset(SGVector<index_t>(idx.get_array(),
			idx.get_num_elements(), false));
	float64_t res = eval->evaluate(predicted, m_labels);
	m_labels->remove_subset();

	/* fix: 'predicted' was leaked before — release it now that the
	 * evaluation is done */
	SG_UNREF(predicted);

	return res;
}