// NOTE(review): commented-out legacy code (not compiled). Preserved verbatim
// below, reformatted from a single line for readability. It trained a
// GInstanceTable by enabling incremental learning and replaying every row of
// pData through trainIncremental(). Uses the older GData/enableIncrementalLearning
// API, unlike the GMatrix-based blocks below — presumably superseded; confirm
// before deleting.
//
// virtual void GInstanceTable::train(GData* pData, int labelDims)
// {
// 	enableIncrementalLearning(pData->relation(), labelDims, NULL, NULL);
// 	// All columns except the last are treated as features; the remainder as labels.
// 	int dims = pData->relation()->size() - 1;
// 	for(size_t i = 0; i < pData->rows(); i++)
// 	{
// 		double* pRow = pData->row(i);
// 		trainIncremental(pRow, pRow + dims);
// 	}
// }
// NOTE(review): commented-out code (not compiled), reformatted from a single
// line for readability. Batch training for GNaiveBayes: rejects non-nominal
// features/labels with a hint to wrap in GAutoFilter, then delegates to the
// incremental-learning path one row at a time. Confirm it is superseded by a
// live definition elsewhere before deleting.
//
// virtual void GNaiveBayes::trainInner(const GMatrix& features, const GMatrix& labels)
// {
// 	if(!features.relation().areNominal())
// 		throw Ex("GNaiveBayes only supports nominal features. Perhaps you should wrap it in a GAutoFilter.");
// 	if(!labels.relation().areNominal())
// 		throw Ex("GNaiveBayes only supports nominal labels. Perhaps you should wrap it in a GAutoFilter.");
// 	beginIncrementalLearningInner(features.relation(), labels.relation());
// 	for(size_t n = 0; n < features.rows(); n++)
// 		trainIncremental(features[n], labels[n]);
// }
// NOTE(review): commented-out code (not compiled), reformatted from a single
// line for readability. Batch training for GNaiveInstance — structural twin of
// GNaiveBayes::trainInner above, but requires all-continuous (rather than
// all-nominal) features and labels. Confirm it is superseded before deleting.
//
// virtual void GNaiveInstance::trainInner(const GMatrix& features, const GMatrix& labels)
// {
// 	if(!features.relation().areContinuous())
// 		throw Ex("GNaiveInstance only supports continuous features. Perhaps you should wrap it in a GAutoFilter.");
// 	if(!labels.relation().areContinuous())
// 		throw Ex("GNaiveInstance only supports continuous labels. Perhaps you should wrap it in a GAutoFilter.");
// 	beginIncrementalLearningInner(features.relation(), labels.relation());
// 	for(size_t i = 0; i < features.rows(); i++)
// 		trainIncremental(features[i], labels[i]);
// }
// NOTE(review): commented-out code (not compiled), reformatted from a single
// line for readability. Sparse training for GNaiveBayes: densifies each sparse
// row into fullRow, binarizes it (values below 1e-6 become 0.0, everything
// else 1.0) to match a uniform 2-value feature relation, then feeds it through
// trainIncremental(). Confirm it is superseded before deleting.
//
// virtual void GNaiveBayes::trainSparse(GSparseMatrix& features, GMatrix& labels)
// {
// 	if(features.rows() != labels.rows())
// 		throw Ex("Expected the features and labels to have the same number of rows");
// 	size_t featureDims = features.cols();
// 	// Every feature column is treated as binary (2 nominal values).
// 	GUniformRelation featureRel(featureDims, 2);
// 	beginIncrementalLearning(featureRel, labels.relation());
// 	GVec fullRow(featureDims);
// 	for(size_t n = 0; n < features.rows(); n++)
// 	{
// 		features.fullRow(fullRow, n);
// 		// Threshold near-zero entries to 0, all others to 1.
// 		for(size_t i = 0; i < featureDims; i++)
// 		{
// 			if(fullRow[i] < 1e-6)
// 				fullRow[i] = 0.0;
// 			else
// 				fullRow[i] = 1.0;
// 		}
// 		trainIncremental(fullRow, labels[n]);
// 	}
// }