Example #1
bool Softmax::loadModelFromFile(fstream &file){
    
    trained = false;
    numFeatures = 0;
    numClasses = 0;
    models.clear();
    classLabels.clear();
    
    if(!file.is_open())
    {
        errorLog << "loadModelFromFile(string filename) - Could not open file to load model" << endl;
        return false;
    }
    
    std::string word;
    
    //Find the file type header
    file >> word;
    if(word != "GRT_SOFTMAX_MODEL_FILE_V1.0"){
        errorLog << "loadModelFromFile(string filename) - Could not find Model File Header" << endl;
        return false;
    }
    
    file >> word;
    if(word != "NumFeatures:"){
        errorLog << "loadModelFromFile(string filename) - Could not find NumFeatures!" << endl;
        return false;
    }
    file >> numFeatures;
    
    file >> word;
    if(word != "NumClasses:"){
        errorLog << "loadModelFromFile(string filename) - Could not find NumClasses!" << endl;
        return false;
    }
    file >> numClasses;
    
    file >> word;
    if(word != "UseScaling:"){
        errorLog << "loadModelFromFile(string filename) - Could not find UseScaling!" << endl;
        return false;
    }
    file >> useScaling;
    
    file >> word;
    if(word != "UseNullRejection:"){
        errorLog << "loadModelFromFile(string filename) - Could not find UseNullRejection!" << endl;
        return false;
    }
    file >> useNullRejection;
    
    //Read the ranges if needed
    if( useScaling ){
        //Resize the ranges buffer
        ranges.resize(numFeatures);
        
        file >> word;
        if(word != "Ranges:"){
            errorLog << "loadModelFromFile(string filename) - Could not find the Ranges!" << endl;
            return false;
        }
        for(UINT n=0; n<ranges.size(); n++){
            file >> ranges[n].minValue;
            file >> ranges[n].maxValue;
        }
    }
    
    //Resize the buffer
    models.resize(numClasses);
    classLabels.resize(numClasses);
    
    //Load the models
    file >> word;
    if(word != "Models:"){
        errorLog << "loadModelFromFile(string filename) - Could not find the Models!" << endl;
        return false;
    }
    
    for(UINT k=0; k<numClasses; k++){
        file >> word;
        if(word != "ClassLabel:"){
            errorLog << "loadModelFromFile(string filename) - Could not find the ClassLabel for model: " << k << "!" << endl;
            return false;
        }
        file >> models[k].classLabel;
        classLabels[k] = models[k].classLabel;
        
        file >> word;
        if(word != "Weights:"){
            errorLog << "loadModelFromFile(string filename) - Could not find the Weights for model: " << k << "!" << endl;
            return false;
        }
        file >> models[k].w0;
        
        models[k].N = numFeatures;
        models[k].w.resize( numFeatures );
        for(UINT n=0; n<numFeatures; n++){
            file >> models[k].w[n];
        }
    }
    
    //Recompute the null rejection thresholds
    recomputeNullRejectionThresholds();
    
    //Resize the prediction results to make sure it is setup for realtime prediction
    maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
    bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
    classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
    classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    
    //Flag that the model has been trained
    trained = true;
    
    return true;
}
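
A minimal usage sketch for the loader above, assuming the GRT Softmax header is included and the GRT namespace is in scope; the model file name is hypothetical:

// Hedged usage sketch (not part of the original example): open a model file
// and hand it to loadModelFromFile(). "softmax_model.grt" is a made-up name;
// the file must begin with the GRT_SOFTMAX_MODEL_FILE_V1.0 header checked above.
#include <fstream>
#include <iostream>

bool loadSoftmaxModel( Softmax &softmax ){
    std::fstream file( "softmax_model.grt", std::ios::in );
    bool ok = softmax.loadModelFromFile( file );   //also fails cleanly if the file did not open
    if( !ok ){
        std::cout << "Failed to load the Softmax model" << std::endl;
    }
    file.close();
    return ok;
}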
Example #2
File: KNN.cpp Project: ios4u/grt
bool KNN::loadLegacyModelFromFile( fstream &file ){
    
    string word;
    
    //Find the file type header
    file >> word;
    if(word != "NumFeatures:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find NumFeatures!" << endl;
        return false;
    }
    file >> numInputDimensions;
    
    file >> word;
    if(word != "NumClasses:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find NumClasses!" << endl;
        return false;
    }
    file >> numClasses;
    
    file >> word;
    if(word != "K:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find K!" << endl;
        return false;
    }
    file >> K;
    
    file >> word;
    if(word != "DistanceMethod:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find DistanceMethod!" << endl;
        return false;
    }
    file >> distanceMethod;
    
    file >> word;
    if(word != "SearchForBestKValue:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find SearchForBestKValue!" << endl;
        return false;
    }
    file >> searchForBestKValue;
    
    file >> word;
    if(word != "MinKSearchValue:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find MinKSearchValue!" << endl;
        return false;
    }
    file >> minKSearchValue;
    
    file >> word;
    if(word != "MaxKSearchValue:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find MaxKSearchValue!" << endl;
        return false;
    }
    file >> maxKSearchValue;
    
    file >> word;
    if(word != "UseScaling:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find UseScaling!" << endl;
        return false;
    }
    file >> useScaling;
    
    file >> word;
    if(word != "UseNullRejection:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find UseNullRejection!" << endl;
        return false;
    }
    file >> useNullRejection;
    
    file >> word;
    if(word != "NullRejectionCoeff:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find NullRejectionCoeff!" << endl;
        return false;
    }
    file >> nullRejectionCoeff;
    
    //Read the ranges if needed
    if( useScaling ){
        //Resize the ranges buffer
        ranges.resize( numInputDimensions );
        
        file >> word;
        if(word != "Ranges:"){
            errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find Ranges!" << endl;
            cout << "Word: " << word << endl;
            return false;
        }
        for(UINT n=0; n<ranges.size(); n++){
            file >> ranges[n].minValue;
            file >> ranges[n].maxValue;
        }
    }
    
    //Resize the buffers
    trainingMu.resize(numClasses,0);
    trainingSigma.resize(numClasses,0);
    
    file >> word;
    if(word != "TrainingMu:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find TrainingMu!" << endl;
        return false;
    }
    
    //Load the trainingMu data
    for(UINT j=0; j<numClasses; j++){
        file >> trainingMu[j];
    }
    
    file >> word;
    if(word != "TrainingSigma:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find TrainingSigma!" << endl;
        return false;
    }
    
    //Load the trainingSigma data
    for(UINT j=0; j<numClasses; j++){
        file >> trainingSigma[j];
    }
    
    file >> word;
    if(word != "NumTrainingSamples:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find NumTrainingSamples!" << endl;
        return false;
    }
    unsigned int numTrainingSamples = 0;
    file >> numTrainingSamples;
    
    file >> word;
    if(word != "TrainingData:"){
        errorLog << "loadLegacyModelFromFile(fstream &file) - Could not find TrainingData!" << endl;
        return false;
    }
    
    //Load the training data
    trainingData.setNumDimensions(numInputDimensions);
    unsigned int classLabel = 0;
    vector< double > sample(numInputDimensions,0);
    for(UINT i=0; i<numTrainingSamples; i++){
        //Read the class label
        file >> classLabel;
        
        //Read the feature vector
        for(UINT j=0; j<numInputDimensions; j++){
            file >> sample[j];
        }
        
        //Add it to the training data
        trainingData.addSample(classLabel, sample);
    }
    
    //Flag that the model has been trained
    trained = true;
    
    //Compute the null rejection thresholds
    recomputeNullRejectionThresholds();
    
    return true;
}
Example #3
bool MinDist::loadLegacyModelFromFile( std::fstream &file ){
    
    std::string word;
    
    file >> word;
    if(word != "NumFeatures:"){
        errorLog << "loadModelFromFile(string filename) - Could not find NumFeatures " << std::endl;
        return false;
    }
    file >> numInputDimensions;
    
    file >> word;
    if(word != "NumClasses:"){
        errorLog << "loadModelFromFile(string filename) - Could not find NumClasses" << std::endl;
        return false;
    }
    file >> numClasses;
    
    file >> word;
    if(word != "UseScaling:"){
        errorLog << "loadModelFromFile(string filename) - Could not find UseScaling" << std::endl;
        return false;
    }
    file >> useScaling;
    
    file >> word;
    if(word != "UseNullRejection:"){
        errorLog << "loadModelFromFile(string filename) - Could not find UseNullRejection" << std::endl;
        return false;
    }
    file >> useNullRejection;
    
    //Read the ranges if needed
    if( useScaling ){
        //Resize the ranges buffer
        ranges.resize(numInputDimensions);
        
        file >> word;
        if(word != "Ranges:"){
            errorLog << "loadModelFromFile(string filename) - Could not find the Ranges" << std::endl;
            return false;
        }
        for(UINT n=0; n<ranges.size(); n++){
            file >> ranges[n].minValue;
            file >> ranges[n].maxValue;
        }
    }
    
    //Resize the buffer
    models.resize(numClasses);
    classLabels.resize(numClasses);
    
    //Load each of the K models
    for(UINT k=0; k<numClasses; k++){
        Float rejectionThreshold;
        Float gamma;
        Float trainingSigma;
        Float trainingMu;
        
        file >> word;
        if( word != "ClassLabel:" ){
            errorLog << "loadModelFromFile(string filename) - Could not load the class label for class " << k << std::endl;
            return false;
        }
        file >> classLabels[k];
        
        file >> word;
        if( word != "NumClusters:" ){
            errorLog << "loadModelFromFile(string filename) - Could not load the NumClusters for class " << k << std::endl;
            return false;
        }
        file >> numClusters;
        
        file >> word;
        if( word != "RejectionThreshold:" ){
            errorLog << "loadModelFromFile(string filename) - Could not load the RejectionThreshold for class " << k << std::endl;
            return false;
        }
        file >> rejectionThreshold;
        
        file >> word;
        if( word != "Gamma:" ){
            errorLog << "loadModelFromFile(string filename) - Could not load the Gamma for class " << k << std::endl;
            return false;
        }
        file >> gamma;
        
        file >> word;
        if( word != "TrainingMu:" ){
            errorLog << "loadModelFromFile(string filename) - Could not load the TrainingMu for class " << k << std::endl;
            return false;
        }
        file >> trainingMu;
        
        file >> word;
        if( word != "TrainingSigma:" ){
            errorLog << "loadModelFromFile(string filename) - Could not load the TrainingSigma for class " << k << std::endl;
            return false;
        }
        file >> trainingSigma;
        
        file >> word;
        if( word != "ClusterData:" ){
            errorLog << "loadModelFromFile(string filename) - Could not load the ClusterData for class " << k << std::endl;
            return false;
        }
        
        //Load the cluster data
        MatrixFloat clusters(numClusters,numInputDimensions);
        for(UINT i=0; i<numClusters; i++){
            for(UINT j=0; j<numInputDimensions; j++){
                file >> clusters[i][j];
            }
        }
        
        models[k].setClassLabel( classLabels[k] );
        models[k].setClusters( clusters );
        models[k].setGamma( gamma );
        models[k].setRejectionThreshold( rejectionThreshold );
        models[k].setTrainingSigma( trainingSigma );
        models[k].setTrainingMu( trainingMu );
    }
    
    //Recompute the null rejection thresholds
    recomputeNullRejectionThresholds();
    
    //Resize the prediction results to make sure it is setup for realtime prediction
    maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
    bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
    classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
    classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    
    trained = true;
    
    return true;
}
Example #4
bool MinDist::loadModelFromFile( std::fstream &file ){
    
    clear();
    
    if(!file.is_open())
    {
        errorLog << "loadModelFromFile(string filename) - Could not open file to load model" << std::endl;
        return false;
    }
    
    std::string word;
    
    //Load the file header
    file >> word;
    
    //Check to see if we should load a legacy file
    if( word == "GRT_MINDIST_MODEL_FILE_V1.0" ){
        return loadLegacyModelFromFile( file );
    }
    
    //Find the file type header
    if(word != "GRT_MINDIST_MODEL_FILE_V2.0"){
        errorLog << "loadModelFromFile(string filename) - Could not find Model File Header" << std::endl;
        return false;
    }
    
    //Load the base settings from the file
    if( !Classifier::loadBaseSettingsFromFile(file) ){
        errorLog << "loadModelFromFile(string filename) - Failed to load base settings from file!" << std::endl;
        return false;
    }
    
    if( trained ){
        
        //Resize the buffer
        models.resize(numClasses);
        classLabels.resize(numClasses);
        
        //Load each of the K models
        for(UINT k=0; k<numClasses; k++){
            Float rejectionThreshold;
            Float gamma;
            Float trainingSigma;
            Float trainingMu;
            
            file >> word;
            if( word != "ClassLabel:" ){
                errorLog << "loadModelFromFile(string filename) - Could not load the class label for class " << k << std::endl;
                return false;
            }
            file >> classLabels[k];
            
            file >> word;
            if( word != "NumClusters:" ){
                errorLog << "loadModelFromFile(string filename) - Could not load the NumClusters for class " << k << std::endl;
                return false;
            }
            file >> numClusters;
            
            file >> word;
            if( word != "RejectionThreshold:" ){
                errorLog << "loadModelFromFile(string filename) - Could not load the RejectionThreshold for class " << k << std::endl;
                return false;
            }
            file >> rejectionThreshold;
            
            file >> word;
            if( word != "Gamma:" ){
                errorLog << "loadModelFromFile(string filename) - Could not load the Gamma for class " << k << std::endl;
                return false;
            }
            file >> gamma;
            
            file >> word;
            if( word != "TrainingMu:" ){
                errorLog << "loadModelFromFile(string filename) - Could not load the TrainingMu for class " << k << std::endl;
                return false;
            }
            file >> trainingMu;
            
            file >> word;
            if( word != "TrainingSigma:" ){
                errorLog << "loadModelFromFile(string filename) - Could not load the TrainingSigma for class " << k << std::endl;
                return false;
            }
            file >> trainingSigma;
            
            file >> word;
            if( word != "ClusterData:" ){
                errorLog << "loadModelFromFile(string filename) - Could not load the ClusterData for class " << k << std::endl;
                return false;
            }
            
            //Load the cluster data
            MatrixFloat clusters(numClusters,numInputDimensions);
            for(UINT i=0; i<numClusters; i++){
                for(UINT j=0; j<numInputDimensions; j++){
                    file >> clusters[i][j];
                }
            }
            
            models[k].setClassLabel( classLabels[k] );
            models[k].setClusters( clusters );
            models[k].setGamma( gamma );
            models[k].setRejectionThreshold( rejectionThreshold );
            models[k].setTrainingSigma( trainingSigma );
            models[k].setTrainingMu( trainingMu );
        }
        
        //Recompute the null rejection thresholds
        recomputeNullRejectionThresholds();
        
        //Resize the prediction results to make sure it is setup for realtime prediction
        maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
        bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
        classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
        classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    }
    
    return true;
}
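
The loader above reads the header word first and dispatches on it, which is what keeps files written in the older V1.0 format loadable after the format moved to V2.0. A condensed, self-contained sketch of that pattern; the header strings and the loadLegacyFormat/loadCurrentFormat helpers are hypothetical placeholders, not GRT API:

// Hedged sketch of the header-dispatch pattern used above. MY_MODEL_FILE_V1.0,
// MY_MODEL_FILE_V2.0, loadLegacyFormat() and loadCurrentFormat() are invented
// names used only to illustrate the structure.
#include <fstream>
#include <string>

bool loadLegacyFormat( std::fstream &file );   //hypothetical parser for the old format
bool loadCurrentFormat( std::fstream &file );  //hypothetical parser for the current format

bool loadVersionedModel( std::fstream &file ){
    std::string header;
    file >> header;
    if( header == "MY_MODEL_FILE_V1.0" ) return loadLegacyFormat( file );   //legacy file
    if( header != "MY_MODEL_FILE_V2.0" ) return false;                      //unknown format
    return loadCurrentFormat( file );                                       //current file
}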
Example #5
bool KNN::train_(LabelledClassificationData &trainingData,UINT K){

    //Clear any previous models
    clear();

    if( trainingData.getNumSamples() == 0 ){
        errorLog << "train(LabelledClassificationData &trainingData) - Training data has zero samples!" << endl;
        return false;
    }

    //Set the dimensionality of the input data
    this->K = K;
    this->numFeatures = trainingData.getNumDimensions();
    this->numClasses = trainingData.getNumClasses();

    //TODO: In the future need to build a kdtree from the training data to allow better realtime prediction
    this->trainingData = trainingData;

    if( useScaling ){
        ranges = this->trainingData.getRanges();
        this->trainingData.scale(ranges, 0, 1);
    }

    //Set the class labels
    classLabels.resize(numClasses);
    for(UINT k=0; k<numClasses; k++){
        classLabels[k] = trainingData.getClassTracker()[k].classLabel;
    }

    //Flag that the algorithm has been trained so we can compute the rejection thresholds
    trained = true;
    
    //If null rejection is enabled then compute the null rejection thresholds
    if( useNullRejection ){

        //Set the null rejection to false so we can compute the values for it (this will be set back to its current value later)
        bool tempUseNullRejection = useNullRejection;
        useNullRejection = false;
        rejectionThresholds.clear();

        //Compute the rejection thresholds for each of the K classes
        vector< double > counter(numClasses,0);
        trainingMu.resize( numClasses, 0 );
        trainingSigma.resize( numClasses, 0 );
        rejectionThresholds.resize( numClasses, 0 );

        //Compute Mu for each of the classes
        const unsigned int numTrainingExamples = trainingData.getNumSamples();
        vector< IndexedDouble > predictionResults( numTrainingExamples );
        for(UINT i=0; i<numTrainingExamples; i++){
            predict( trainingData[i].getSample(), K);

            UINT classLabelIndex = 0;
            for(UINT k=0; k<numClasses; k++){
                if( predictedClassLabel == classLabels[k] ){
                    classLabelIndex = k;
                    break;
                }
            }

            predictionResults[ i ].index = classLabelIndex;
            predictionResults[ i ].value = classDistances[ classLabelIndex ];

            trainingMu[ classLabelIndex ] += predictionResults[ i ].value;
            counter[ classLabelIndex ]++;
        }

        for(UINT j=0; j<numClasses; j++){
            trainingMu[j] /= counter[j];
        }

        //Compute Sigma for each of the classes
        for(UINT i=0; i<numTrainingExamples; i++){
            trainingSigma[predictionResults[i].index] += SQR(predictionResults[i].value - trainingMu[predictionResults[i].index]);
        }

        for(UINT j=0; j<numClasses; j++){
            double count = counter[j];
            if( count > 1 ){
                trainingSigma[ j ] = sqrt( trainingSigma[j] / (count-1) );
            }else{
                trainingSigma[ j ] = 1.0;
            }
        }

        //Check to see if any of the mu or sigma values are zero or NaN
        bool errorFound = false;
        for(UINT j=0; j<numClasses; j++){
            if( trainingMu[j] == 0 ){
                warningLog << "TrainingMu[ " << j << " ] is zero for a K value of " << K << endl;
            }
            if( trainingSigma[j] == 0 ){
                warningLog << "TrainingSigma[ " << j << " ] is zero for a K value of " << K << endl;
            }
            if( isnan( trainingMu[j] ) ){
                errorLog << "TrainingMu[ " << j << " ] is NAN for a K value of " << K << endl;
                errorFound = true;
            }
            if( isnan( trainingSigma[j] ) ){
                errorLog << "TrainingSigma[ " << j << " ] is NAN for a K value of " << K << endl;
                errorFound = true;
            }
        }

        if( errorFound ){
            trained = false;
            return false;
        }

        //Recompute the rejection thresholds
        recomputeNullRejectionThresholds();

        //Restore the actual state of the null rejection
        useNullRejection = tempUseNullRejection;
        
    }else{
        //Resize the rejection thresholds but set the values to 0
        rejectionThresholds.clear();
        rejectionThresholds.resize( numClasses, 0 );
    }

    return true;
}
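
Note that train_ only accumulates the per-class mean (trainingMu) and standard deviation (trainingSigma) of the nearest-neighbour distances; the thresholds themselves are produced by recomputeNullRejectionThresholds(). A self-contained sketch of one common convention for that step, threshold = mu + coeff * sigma; the exact rule GRT applies may differ:

// Hedged sketch: convert the per-class distance statistics gathered in train_
// into null-rejection thresholds using threshold = mu + coeff * sigma.
// This mirrors what recomputeNullRejectionThresholds() is expected to do,
// but it is an illustration, not the library's implementation.
#include <vector>

std::vector< double > computeRejectionThresholds( const std::vector< double > &trainingMu,
                                                  const std::vector< double > &trainingSigma,
                                                  const double nullRejectionCoeff ){
    std::vector< double > thresholds( trainingMu.size(), 0 );
    for(size_t j=0; j<trainingMu.size(); j++){
        thresholds[j] = trainingMu[j] + (trainingSigma[j] * nullRejectionCoeff);
    }
    return thresholds;
}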
Example #6
bool ANBC::loadModelFromFile(fstream &file){
    
    trained = false;
    numFeatures = 0;
    numClasses = 0;
    models.clear();
    classLabels.clear();
    
    if(!file.is_open())
    {
        errorLog << "loadANBCModelFromFile(string filename) - Could not open file to load model" << endl;
        return false;
    }
    
    std::string word;
    
    //Find the file type header
    file >> word;
    if(word != "GRT_ANBC_MODEL_FILE_V1.0"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find Model File Header" << endl;
        return false;
    }
    
    file >> word;
    if(word != "NumFeatures:"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find NumFeatures " << endl;
        return false;
    }
    file >> numFeatures;
    
    file >> word;
    if(word != "NumClasses:"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find NumClasses" << endl;
        return false;
    }
    file >> numClasses;
    
    file >> word;
    if(word != "UseScaling:"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find UseScaling" << endl;
        return false;
    }
    file >> useScaling;
    
    file >> word;
    if(word != "UseNullRejection:"){
        errorLog << "loadANBCModelFromFile(string filename) - Could not find UseNullRejection" << endl;
        return false;
    }
    file >> useNullRejection;
    
    //Read the ranges if needed
    if( useScaling ){
        //Resize the ranges buffer
        ranges.resize(numFeatures);
        
        file >> word;
        if(word != "Ranges:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the Ranges" << endl;
            return false;
        }
        for(UINT n=0; n<ranges.size(); n++){
            file >> ranges[n].minValue;
            file >> ranges[n].maxValue;
        }
    }
    
    //Resize the buffer
    models.resize(numClasses);
    classLabels.resize(numClasses);
    
    //Load each of the K models
    for(UINT k=0; k<numClasses; k++){
        UINT modelID;
        file >> word;
        if(word != "*************_MODEL_*************"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find header for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        file >> word;
        if(word != "Model_ID:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find model ID for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> modelID;
        
        if(modelID-1!=k){
            cout<<"ANBC: Model ID does not match the current class ID for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        file >> word;
        if(word != "N:"){
            cout<<"ANBC: Could not find N for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].N;
        
        file >> word;
        if(word != "ClassLabel:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find ClassLabel for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].classLabel;
        classLabels[k] = models[k].classLabel;
        
        file >> word;
        if(word != "Threshold:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the threshold for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].threshold;
        
        file >> word;
        if(word != "Gamma:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the gamma parameter for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].gamma;
        
        file >> word;
        if(word != "TrainingMu:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the training mu parameter for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].trainingMu;
        
        file >> word;
        if(word != "TrainingSigma:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the training sigma parameter for the "<<k+1<<"th model" << endl;
            return false;
        }
        file >> models[k].trainingSigma;
        
        //Resize the buffers
        models[k].mu.resize(numFeatures);
        models[k].sigma.resize(numFeatures);
        models[k].weights.resize(numFeatures);
        
        //Load Mu, Sigma and Weights
        file >> word;
        if(word != "Mu:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the Mu vector for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        //Load Mu
        for(UINT j=0; j<models[k].N; j++){
            double value;
            file >> value;
            models[k].mu[j] = value;
        }
        
        file >> word;
        if(word != "Sigma:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the Sigma vector for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        //Load Sigma
        for(UINT j=0; j<models[k].N; j++){
            double value;
            file >> value;
            models[k].sigma[j] = value;
        }
        
        file >> word;
        if(word != "Weights:"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the Weights vector for the "<<k+1<<"th model" << endl;
            return false;
        }
        
        //Load Weights
        for(UINT j=0; j<models[k].N; j++){
            double value;
            file >> value;
            models[k].weights[j] = value;
        }
        
        file >> word;
        if(word != "*********************************"){
            errorLog << "loadANBCModelFromFile(string filename) - Could not find the model footer for the "<<k+1<<"th model" << endl;
            return false;
        }
    }
    
    //Flag that the model is trained
    trained = true;
    
    //Recompute the null rejection thresholds
    recomputeNullRejectionThresholds();
    
    //Resize the prediction results to make sure it is setup for realtime prediction
    maxLikelihood = DEFAULT_NULL_LIKELIHOOD_VALUE;
    bestDistance = DEFAULT_NULL_DISTANCE_VALUE;
    classLikelihoods.resize(numClasses,DEFAULT_NULL_LIKELIHOOD_VALUE);
    classDistances.resize(numClasses,DEFAULT_NULL_DISTANCE_VALUE);
    
    return true;
}
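
For reference, the file layout the parser above expects can be read straight off the tokens it consumes. A hypothetical GRT_ANBC_MODEL_FILE_V1.0 file for one class and two features (scaling disabled, so no Ranges: section; every numeric value below is invented) would look roughly like this; whitespace is flexible because everything is parsed with operator>>:

GRT_ANBC_MODEL_FILE_V1.0
NumFeatures: 2
NumClasses: 1
UseScaling: 0
UseNullRejection: 1
*************_MODEL_*************
Model_ID: 1
N: 2
ClassLabel: 1
Threshold: 0.5
Gamma: 2.0
TrainingMu: 0.1
TrainingSigma: 0.05
Mu: 0.2 0.3
Sigma: 0.01 0.02
Weights: 1.0 1.0
*********************************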