void Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(normalNodeMaxNumChildren, "normalNodeMaxNumChildren");
  ar & CreateNVP(splitHistory, "splitHistory");
}
void Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(firstBound, "firstBound");
  ar & CreateNVP(secondBound, "secondBound");
  ar & CreateNVP(bound, "bound");
  ar & CreateNVP(lastDistance, "lastDistance");
}
void DrusillaSelect<MatType>::Serialize(Archive& ar,
                                        const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(candidateSet, "candidateSet");
  ar & CreateNVP(candidateIndices, "candidateIndices");
  ar & CreateNVP(l, "l");
  ar & CreateNVP(m, "m");
}
void Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  // We just need to serialize each of the members.
  ar & CreateNVP(mean, "mean");
  ar & CreateNVP(covariance, "covariance");
  ar & CreateNVP(covLower, "covLower");
  ar & CreateNVP(invCov, "invCov");
  ar & CreateNVP(logDetCov, "logDetCov");
}
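// The Serialize() methods in this listing are driven through mlpack's
// data::CreateNVP() shim, which lets classes that provide Serialize() work
// with ordinary Boost.Serialization archives.  Below is a minimal,
// hypothetical sketch of that mechanism (the Example class and the file name
// are invented for illustration); it assumes mlpack 2.x and
// Boost.Serialization are available.
#include <mlpack/core.hpp>
#include <boost/archive/xml_iarchive.hpp>
#include <boost/archive/xml_oarchive.hpp>
#include <fstream>

using namespace mlpack;

// A hypothetical class that follows the same Serialize() convention.
class Example
{
 public:
  arma::vec mean;
  double scale;

  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    using data::CreateNVP;
    ar & CreateNVP(mean, "mean");
    ar & CreateNVP(scale, "scale");
  }
};

int main()
{
  Example x;
  x.mean = arma::randu<arma::vec>(3);
  x.scale = 0.5;

  // Saving: CreateNVP() detects that Example has Serialize() and forwards the
  // archive to that method.
  {
    std::ofstream ofs("example.xml");
    boost::archive::xml_oarchive ar(ofs);
    ar << data::CreateNVP(x, "example");
  }

  // Loading into a fresh object reverses the process.
  Example y;
  {
    std::ifstream ifs("example.xml");
    boost::archive::xml_iarchive ar(ifs);
    ar >> data::CreateNVP(y, "example");
  }

  return 0;
}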
void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Serialize(
    Archive& ar,
    const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(maxIterations, "maxIterations");
  ar & CreateNVP(tolerance, "tolerance");
  ar & CreateNVP(clusterer, "clusterer");
  ar & CreateNVP(constraint, "constraint");
}
void CF::Serialize(Archive& ar, const unsigned int /* version */)
{
  // This model is simple; just serialize all the members.  No special handling
  // required.
  using data::CreateNVP;

  ar & CreateNVP(numUsersForSimilarity, "numUsersForSimilarity");
  ar & CreateNVP(rank, "rank");
  ar & CreateNVP(w, "w");
  ar & CreateNVP(h, "h");
  ar & CreateNVP(cleanedData, "cleanedData");
}
void DecisionStump<MatType>::Serialize(Archive& ar,
                                       const unsigned int /* version */)
{
  using data::CreateNVP;

  // This is straightforward; just serialize all of the members of the class.
  // None need special handling.
  ar & CreateNVP(classes, "classes");
  ar & CreateNVP(bucketSize, "bucketSize");
  ar & CreateNVP(splitDimension, "splitDimension");
  ar & CreateNVP(split, "split");
  ar & CreateNVP(binLabels, "binLabels");
}
void HilbertRTreeAuxiliaryInformation<TreeType, HilbertValueType>::
Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(hilbertValue, "hilbertValue");
}
void GMM::Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(gaussians, "gaussians");
  ar & CreateNVP(dimensionality, "dimensionality");

  // Load (or save) the gaussians.  Not going to use the default std::vector
  // serialize here because it won't call out correctly to Serialize() for each
  // Gaussian distribution.
  if (Archive::is_loading::value)
    dists.resize(gaussians);

  for (size_t i = 0; i < gaussians; ++i)
  {
    std::ostringstream oss;
    oss << "dist" << i;
    ar & CreateNVP(dists[i], oss.str());
  }

  ar & CreateNVP(weights, "weights");
}
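// A minimal usage sketch (the file name is invented), assuming mlpack 2.x's
// data::Save()/data::Load() model-serialization API.  Saving and loading a GMM
// this way drives the Serialize() above, including the per-Gaussian "dist0",
// "dist1", ... entries written by the manual loop.
#include <mlpack/core.hpp>
#include <mlpack/methods/gmm/gmm.hpp>

using namespace mlpack;

int main()
{
  gmm::GMM g(3, 2); // Three Gaussians in two dimensions.
  // ... train g on data with g.Train(...) if desired ...

  // fatal = true: treat any I/O or serialization failure as a fatal error.
  data::Save("gmm.xml", "gmm", g, true);

  gmm::GMM g2(1, 1); // Will be overwritten entirely by the load.
  data::Load("gmm.xml", "gmm", g2, true);

  return 0;
}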
void RSModel::Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(treeType, "treeType");
  ar & CreateNVP(randomBasis, "randomBasis");
  ar & CreateNVP(q, "q");

  // This should never happen, but just in case...
  if (Archive::is_loading::value)
    CleanMemory();

  // We'll only need to serialize one of the model objects, based on the type.
  switch (treeType)
  {
    case KD_TREE:
      ar & CreateNVP(kdTreeRS, "range_search_model");
      break;
    case COVER_TREE:
      ar & CreateNVP(coverTreeRS, "range_search_model");
      break;
    case R_TREE:
      ar & CreateNVP(rTreeRS, "range_search_model");
      break;
    case R_STAR_TREE:
      ar & CreateNVP(rStarTreeRS, "range_search_model");
      break;
    case BALL_TREE:
      ar & CreateNVP(ballTreeRS, "range_search_model");
      break;
    case X_TREE:
      ar & CreateNVP(xTreeRS, "range_search_model");
      break;
  }
}
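// A stripped-down, hypothetical sketch of the pattern used by RSModel above
// (and by FastMKSModel further below): serialize a tag first, then exactly one
// of several pointer members under a single shared name, so the archive layout
// does not depend on which variant is held.  All names here (VariantModel,
// variant.xml) are invented; the sketch assumes that Boost.Serialization can
// serialize a pointer to arma::mat, as it does for the dataset pointers in the
// tree classes below.
#include <mlpack/core.hpp>
#include <boost/archive/xml_iarchive.hpp>
#include <boost/archive/xml_oarchive.hpp>
#include <fstream>

using namespace mlpack;

struct VariantModel
{
  enum Type { FIRST, SECOND } type = FIRST;
  arma::mat* first = NULL;
  arma::mat* second = NULL;

  template<typename Archive>
  void Serialize(Archive& ar, const unsigned int /* version */)
  {
    using data::CreateNVP;
    ar & CreateNVP(type, "type");

    // On load, free whatever is currently held; boost::serialization will
    // allocate a fresh object for the pointer it deserializes.
    if (Archive::is_loading::value)
    {
      delete first;  first = NULL;
      delete second; second = NULL;
    }

    // Only the variant selected by 'type' is serialized, always as "model".
    switch (type)
    {
      case FIRST:  ar & CreateNVP(first, "model");  break;
      case SECOND: ar & CreateNVP(second, "model"); break;
    }
  }
};

int main()
{
  VariantModel m;
  m.type = VariantModel::SECOND;
  m.second = new arma::mat(arma::randu<arma::mat>(2, 5));

  {
    std::ofstream ofs("variant.xml");
    boost::archive::xml_oarchive ar(ofs);
    ar << data::CreateNVP(m, "model");
  }

  VariantModel m2;
  {
    std::ifstream ifs("variant.xml");
    boost::archive::xml_iarchive ar(ifs);
    ar >> data::CreateNVP(m2, "model");
  }

  delete m.first;  delete m.second;
  delete m2.first; delete m2.second;
  return 0;
}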
void RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType,
                   AuxiliaryInformationType>::
Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  // Clean up memory, if necessary.
  if (Archive::is_loading::value)
  {
    for (size_t i = 0; i < numChildren; i++)
      delete children[i];
    children.clear();

    if (ownsDataset && dataset)
      delete dataset;
  }

  ar & CreateNVP(maxNumChildren, "maxNumChildren");
  ar & CreateNVP(minNumChildren, "minNumChildren");
  ar & CreateNVP(numChildren, "numChildren");

  // Due to quirks of boost::serialization, depending on how the user
  // serializes the tree, the root node may be duplicated.  Therefore we don't
  // allow children of the root to serialize the parent, and we fix the parent
  // link after serializing the children when loading below.
  if (Archive::is_saving::value && parent != NULL && parent->Parent() == NULL)
  {
    RectangleTree* fakeParent = NULL;
    ar & CreateNVP(fakeParent, "parent");
  }
  else
  {
    ar & CreateNVP(parent, "parent");
  }

  ar & CreateNVP(begin, "begin");
  ar & CreateNVP(count, "count");
  ar & CreateNVP(numDescendants, "numDescendants");
  ar & CreateNVP(maxLeafSize, "maxLeafSize");
  ar & CreateNVP(minLeafSize, "minLeafSize");
  ar & CreateNVP(bound, "bound");
  ar & CreateNVP(stat, "stat");
  ar & CreateNVP(parentDistance, "parentDistance");
  ar & CreateNVP(dataset, "dataset");

  // If we are loading and we are the root, we own the dataset.
  if (Archive::is_loading::value && parent == NULL)
    ownsDataset = true;

  ar & CreateNVP(points, "points");
  ar & CreateNVP(auxiliaryInfo, "auxiliaryInfo");

  // Because 'children' holds mlpack types (that have Serialize()), we can't
  // use the std::vector serialization.
  if (Archive::is_loading::value)
    children.resize(numChildren);

  for (size_t i = 0; i < numChildren; ++i)
  {
    std::ostringstream oss;
    oss << "child" << i;
    ar & CreateNVP(children[i], oss.str());
  }

  // Fix the parent links for the children, if necessary.
  if (Archive::is_loading::value && parent == NULL)
  {
    // Look through each child individually.
    for (size_t i = 0; i < children.size(); ++i)
    {
      children[i]->ownsDataset = false;
      children[i]->Parent() = this;
    }
  }
}
void Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(start, "start");
  ar & CreateNVP(end, "end");
  ar & CreateNVP(maxVals, "maxVals");
  ar & CreateNVP(minVals, "minVals");
  ar & CreateNVP(splitDim, "splitDim");
  ar & CreateNVP(splitValue, "splitValue");
  ar & CreateNVP(logNegError, "logNegError");
  ar & CreateNVP(subtreeLeavesLogNegError, "subtreeLeavesLogNegError");
  ar & CreateNVP(subtreeLeaves, "subtreeLeaves");
  ar & CreateNVP(root, "root");
  ar & CreateNVP(ratio, "ratio");
  ar & CreateNVP(logVolume, "logVolume");
  ar & CreateNVP(bucketTag, "bucketTag");
  ar & CreateNVP(alphaUpper, "alphaUpper");

  // If we are loading, delete any existing children before the archive
  // allocates new objects for the 'left' and 'right' pointers.
  if (Archive::is_loading::value)
  {
    if (left)
      delete left;
    if (right)
      delete right;
  }

  ar & CreateNVP(left, "left");
  ar & CreateNVP(right, "right");
}
void FastMKSModel::Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  ar & CreateNVP(kernelType, "kernelType");

  if (Archive::is_loading::value)
  {
    // Clean memory.
    if (linear)
      delete linear;
    if (polynomial)
      delete polynomial;
    if (cosine)
      delete cosine;
    if (gaussian)
      delete gaussian;
    if (epan)
      delete epan;
    if (triangular)
      delete triangular;
    if (hyptan)
      delete hyptan;

    linear = NULL;
    polynomial = NULL;
    cosine = NULL;
    gaussian = NULL;
    epan = NULL;
    triangular = NULL;
    hyptan = NULL;
  }

  // Serialize the correct model.
  switch (kernelType)
  {
    case LINEAR_KERNEL:
      ar & CreateNVP(linear, "linear_fastmks");
      break;
    case POLYNOMIAL_KERNEL:
      ar & CreateNVP(polynomial, "polynomial_fastmks");
      break;
    case COSINE_DISTANCE:
      ar & CreateNVP(cosine, "cosine_fastmks");
      break;
    case GAUSSIAN_KERNEL:
      ar & CreateNVP(gaussian, "gaussian_fastmks");
      break;
    case EPANECHNIKOV_KERNEL:
      ar & CreateNVP(epan, "epan_fastmks");
      break;
    case TRIANGULAR_KERNEL:
      ar & CreateNVP(triangular, "triangular_fastmks");
      break;
    case HYPTAN_KERNEL:
      ar & CreateNVP(hyptan, "hyptan_fastmks");
      break;
  }
}
void CoverTree<MetricType, StatisticType, MatType, RootPointPolicy>::Serialize(
    Archive& ar,
    const unsigned int /* version */)
{
  using data::CreateNVP;

  // If we're loading, and we have children, they need to be deleted.  We may
  // also need to delete the local metric and dataset.
  if (Archive::is_loading::value)
  {
    for (size_t i = 0; i < children.size(); ++i)
      delete children[i];

    if (localMetric && metric)
      delete metric;
    if (localDataset && dataset)
      delete dataset;
  }

  ar & CreateNVP(dataset, "dataset");
  ar & CreateNVP(point, "point");
  ar & CreateNVP(scale, "scale");
  ar & CreateNVP(base, "base");
  ar & CreateNVP(stat, "stat");
  ar & CreateNVP(numDescendants, "numDescendants");

  // Due to quirks of boost::serialization, depending on how the user
  // serializes the tree, it's possible that the root of the tree will
  // accidentally be serialized twice.  So if we are a first-level child, we
  // avoid serializing the parent.  The true (non-duplicated) parent will fix
  // the parent link.
  if (Archive::is_saving::value && parent != NULL && parent->Parent() == NULL)
  {
    CoverTree* fakeParent = NULL;
    ar & CreateNVP(fakeParent, "parent");
  }
  else
  {
    ar & CreateNVP(parent, "parent");
  }

  ar & CreateNVP(parentDistance, "parentDistance");
  ar & CreateNVP(furthestDescendantDistance, "furthestDescendantDistance");
  ar & CreateNVP(metric, "metric");

  if (Archive::is_loading::value && parent == NULL)
  {
    localMetric = true;
    localDataset = true;
  }

  // Lastly, serialize the children.
  size_t numChildren = children.size();
  ar & CreateNVP(numChildren, "numChildren");

  if (Archive::is_loading::value)
    children.resize(numChildren);

  for (size_t i = 0; i < numChildren; ++i)
  {
    std::ostringstream oss;
    oss << "child" << i;
    ar & CreateNVP(children[i], oss.str());
  }

  if (Archive::is_loading::value && parent == NULL)
  {
    // Look through each child individually.
    for (size_t i = 0; i < children.size(); ++i)
    {
      children[i]->localMetric = false;
      children[i]->localDataset = false;
      children[i]->Parent() = this;
    }
  }
}
void RangeSearch<MetricType, MatType, TreeType>::Serialize(
    Archive& ar,
    const unsigned int /* version */)
{
  using data::CreateNVP;

  // Serialize preferences for search.
  ar & CreateNVP(naive, "naive");
  ar & CreateNVP(singleMode, "singleMode");

  // Reset base cases and scores if we are loading.
  if (Archive::is_loading::value)
  {
    baseCases = 0;
    scores = 0;
  }

  // If we are doing naive search, we serialize the dataset.  Otherwise we
  // serialize the tree.
  if (naive)
  {
    if (Archive::is_loading::value)
    {
      if (setOwner && referenceSet)
        delete referenceSet;

      setOwner = true;
    }

    ar & CreateNVP(referenceSet, "referenceSet");
    ar & CreateNVP(metric, "metric");

    // If we are loading, set the tree to NULL and clean up memory if
    // necessary.
    if (Archive::is_loading::value)
    {
      if (treeOwner && referenceTree)
        delete referenceTree;

      referenceTree = NULL;
      oldFromNewReferences.clear();
      treeOwner = false;
    }
  }
  else
  {
    // Delete the current reference tree, if necessary and if we are loading.
    if (Archive::is_loading::value)
    {
      if (treeOwner && referenceTree)
        delete referenceTree;

      // After we load the tree, we will own it.
      treeOwner = true;
    }

    ar & CreateNVP(referenceTree, "referenceTree");
    ar & CreateNVP(oldFromNewReferences, "oldFromNewReferences");

    // If we are loading, set the dataset accordingly and clean up memory if
    // necessary.
    if (Archive::is_loading::value)
    {
      if (setOwner && referenceSet)
        delete referenceSet;

      referenceSet = &referenceTree->Dataset();
      metric = referenceTree->Metric(); // Get the metric from the tree.
      setOwner = false;
    }
  }
}
void BinarySpaceTree<MetricType, StatisticType, MatType, BoundType,
                     SplitType>::
Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  // If we're loading, and we have children, they need to be deleted.
  if (Archive::is_loading::value)
  {
    if (left)
      delete left;
    if (right)
      delete right;
    if (!parent)
      delete dataset;
  }

  ar & CreateNVP(parent, "parent");
  ar & CreateNVP(begin, "begin");
  ar & CreateNVP(count, "count");
  ar & CreateNVP(bound, "bound");
  ar & CreateNVP(stat, "statistic");
  ar & CreateNVP(parentDistance, "parentDistance");
  ar & CreateNVP(furthestDescendantDistance, "furthestDescendantDistance");
  ar & CreateNVP(dataset, "dataset");

  // Save children last; otherwise boost::serialization gets confused.
  ar & CreateNVP(left, "left");
  ar & CreateNVP(right, "right");

  // Due to quirks of boost::serialization, if a tree is saved as an object and
  // not a pointer, the first level of the tree will be duplicated on load.
  // Therefore, if we are the root of the tree, then we need to make sure our
  // children's parent links are correct, and delete the duplicated node if
  // necessary.
  if (Archive::is_loading::value)
  {
    // Get the parents of the left and right children, or NULL if they don't
    // exist.
    BinarySpaceTree* leftParent = left ? left->Parent() : NULL;
    BinarySpaceTree* rightParent = right ? right->Parent() : NULL;

    // Reassign parent links if necessary.
    if (left && left->Parent() != this)
      left->Parent() = this;
    if (right && right->Parent() != this)
      right->Parent() = this;

    // Do we need to delete the left parent?
    if (leftParent != NULL && leftParent != this)
    {
      // Sever the duplicate parent's children.  Ensure we don't delete the
      // dataset, by faking the duplicated parent's parent (that is, we need to
      // set the parent to something non-NULL; 'this' works).
      leftParent->Parent() = this;
      leftParent->Left() = NULL;
      leftParent->Right() = NULL;
      delete leftParent;
    }

    // Do we need to delete the right parent?
    if (rightParent != NULL && rightParent != this &&
        rightParent != leftParent)
    {
      // Sever the duplicate parent's children, in the same way as above.
      rightParent->Parent() = this;
      rightParent->Left() = NULL;
      rightParent->Right() = NULL;
      delete rightParent;
    }
  }
}
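// A minimal usage sketch (the file name and random dataset are invented),
// assuming the mlpack 2.x KDTree alias and Boost XML archives.  The tree is
// serialized as an object rather than through a pointer, which is exactly the
// case the duplicate-root clean-up above is written to handle: on load, the
// Serialize() method frees the old children, repairs the children's parent
// links, and deletes any duplicated root node.
#include <mlpack/core.hpp>
#include <mlpack/core/tree/binary_space_tree.hpp>
#include <boost/archive/xml_iarchive.hpp>
#include <boost/archive/xml_oarchive.hpp>
#include <fstream>

using namespace mlpack;

int main()
{
  using TreeType = tree::KDTree<metric::EuclideanDistance, tree::EmptyStatistic,
      arma::mat>;

  arma::mat dataset = arma::randu<arma::mat>(3, 100); // 100 points in 3-d.
  TreeType tree(dataset);

  // Save the tree as an object (not a pointer).
  {
    std::ofstream ofs("tree.xml");
    boost::archive::xml_oarchive ar(ofs);
    ar << data::CreateNVP(tree, "tree");
  }

  // Load it back into a trivially-built tree; Serialize() takes care of the
  // clean-up described above.
  arma::mat other = arma::randu<arma::mat>(3, 1);
  TreeType tree2(other);
  {
    std::ifstream ifs("tree.xml");
    boost::archive::xml_iarchive ar(ifs);
    ar >> data::CreateNVP(tree2, "tree");
  }

  return 0;
}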
void LARS::Serialize(Archive& ar, const unsigned int /* version */)
{
  using data::CreateNVP;

  // If we're loading, we have to use the internal storage.
  if (Archive::is_loading::value)
  {
    matGram = &matGramInternal;
    ar & CreateNVP(matGramInternal, "matGramInternal");
  }
  else
  {
    ar & CreateNVP(const_cast<arma::mat&>(*matGram), "matGramInternal");
  }

  ar & CreateNVP(matUtriCholFactor, "matUtriCholFactor");
  ar & CreateNVP(useCholesky, "useCholesky");
  ar & CreateNVP(lasso, "lasso");
  ar & CreateNVP(lambda1, "lambda1");
  ar & CreateNVP(elasticNet, "elasticNet");
  ar & CreateNVP(lambda2, "lambda2");
  ar & CreateNVP(tolerance, "tolerance");
  ar & CreateNVP(betaPath, "betaPath");
  ar & CreateNVP(lambdaPath, "lambdaPath");
  ar & CreateNVP(activeSet, "activeSet");
  ar & CreateNVP(isActive, "isActive");
  ar & CreateNVP(ignoreSet, "ignoreSet");
  ar & CreateNVP(isIgnored, "isIgnored");
}
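// A minimal usage sketch (the file name and random data are invented),
// assuming mlpack 2.x's data::Save()/data::Load() model-serialization API and
// the LARS::Train(data, responses, beta) overload; both are assumptions to
// verify against the installed version.  Saving writes *matGram through the
// const_cast branch above; loading points matGram back at matGramInternal.
#include <mlpack/core.hpp>
#include <mlpack/methods/lars/lars.hpp>

using namespace mlpack;

int main()
{
  arma::mat X = arma::randu<arma::mat>(10, 100); // 10 dimensions, 100 points.
  arma::vec y = arma::randu<arma::vec>(100);

  regression::LARS lars(true /* useCholesky */, 0.1 /* lambda1 */);
  arma::vec beta;
  lars.Train(X, y, beta); // Assumed overload; points are stored as columns.

  data::Save("lars.bin", "lars", lars, true);  // fatal = true on failure.

  regression::LARS lars2;
  data::Load("lars.bin", "lars", lars2, true);

  return 0;
}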