/***********************************************************************//**
 * @brief GSparseMatrix to GSymMatrix storage class convertor
 *
 * @param[in] matrix Sparse matrix (GSparseMatrix).
 *
 * @exception GException::matrix_not_symmetric
 *            Sparse matrix is not symmetric.
 *
 * Converts a sparse matrix into the symmetric storage class. If the input
 * matrix is not symmetric, an exception is thrown.
 ***************************************************************************/
GSymMatrix::GSymMatrix(const GSparseMatrix& matrix)
{
    // Initialise class members for clean destruction
    init_members();

    // Allocate matrix memory
    alloc_members(matrix.rows(), matrix.cols());

    // Fill matrix. Only the lower-left triangle is visited since the
    // symmetric storage class stores a single triangle. Each element is
    // fetched once and cached, because element access on a sparse matrix
    // is not O(1); the cached value is reused for the assignment instead
    // of performing a second sparse lookup.
    for (int col = 0; col < matrix.cols(); ++col) {
        for (int row = col; row < matrix.rows(); ++row) {
            double value_ll = matrix(row, col);
            // Verify symmetry for off-diagonal elements only; a diagonal
            // element is trivially equal to itself, so checking it would
            // just cost an extra sparse lookup.
            if (row != col) {
                double value_ur = matrix(col, row);
                if (value_ll != value_ur) {
                    throw GException::matrix_not_symmetric(G_CAST_SPARSEMATRIX,
                                                           matrix.rows(),
                                                           matrix.cols());
                }
            }
            (*this)(row, col) = value_ll;
        }
    }

    // Return
    return;
}
/***********************************************************************//**
 * @brief GSparseMatrix to GMatrix storage class convertor
 *
 * @param[in] matrix Sparse matrix (GSparseMatrix).
 *
 * This constructor converts a sparse matrix (of type GSparseMatrix) into a
 * generic matrix. As the result is generic, the conversion will succeed in
 * all cases.
 ***************************************************************************/
GMatrix::GMatrix(const GSparseMatrix& matrix) : GMatrixBase(matrix)
{
    // Initialise class members for clean destruction
    init_members();

    // Allocate storage matching the dimensions of the sparse source
    alloc_members(matrix.rows(), matrix.cols());

    // Copy every element of the sparse matrix into the generic storage,
    // walking column by column
    int num_cols = matrix.cols();
    int num_rows = matrix.rows();
    for (int j = 0; j < num_cols; ++j) {
        for (int i = 0; i < num_rows; ++i) {
            (*this)(i, j) = matrix(i, j);
        }
    }

    // Return
    return;
}
// virtual void GNaiveBayes::trainSparse(GSparseMatrix& features, GMatrix& labels) { if(features.rows() != labels.rows()) throw Ex("Expected the features and labels to have the same number of rows"); size_t featureDims = features.cols(); GUniformRelation featureRel(featureDims, 2); beginIncrementalLearning(featureRel, labels.relation()); GVec fullRow(featureDims); for(size_t n = 0; n < features.rows(); n++) { features.fullRow(fullRow, n); for(size_t i = 0; i < featureDims; i++) { if(fullRow[i] < 1e-6) fullRow[i] = 0.0; else fullRow[i] = 1.0; } trainIncremental(fullRow, labels[n]); } }