bool RandomForests::predict_(VectorDouble &inputVector){

    predictedClassLabel = 0;
    maxLikelihood = 0;

    if( !trained ){
        errorLog << "predict_(VectorDouble &inputVector) - Model Not Trained!" << std::endl;
        return false;
    }

    if( inputVector.getSize() != numInputDimensions ){
        // BUGFIX: the original message never closed the parenthesis around numInputDimensions
        errorLog << "predict_(VectorDouble &inputVector) - The size of the input Vector (" << inputVector.getSize() << ") does not match the num features in the model (" << numInputDimensions << ")!" << std::endl;
        return false;
    }

    // Scale the input in place to [0,1] using the ranges learned at training time
    if( useScaling ){
        for(UINT n=0; n<numInputDimensions; n++){
            inputVector[n] = grt_scale(inputVector[n], ranges[n].minValue, ranges[n].maxValue, 0.0, 1.0);
        }
    }

    // Make sure the likelihoods and distances buffers match the number of classes
    if( classLikelihoods.getSize() != numClasses ) classLikelihoods.resize(numClasses,0);
    if( classDistances.getSize() != numClasses ) classDistances.resize(numClasses,0);

    std::fill(classDistances.begin(),classDistances.end(),0);

    // Run the prediction for each tree in the forest, accumulating the per-class votes
    VectorDouble y;
    for(UINT i=0; i<forestSize; i++){
        if( !forest[i]->predict(inputVector, y) ){
            errorLog << "predict_(VectorDouble &inputVector) - Tree " << i << " failed prediction!" << std::endl;
            return false;
        }

        for(UINT j=0; j<numClasses; j++){
            classDistances[j] += y[j];
        }
    }

    // Use the class distances to estimate the class likelihoods
    // (each likelihood is the average vote across the forest)
    bestDistance = 0;
    UINT bestIndex = 0;
    Float classNorm = 1.0 / Float(forestSize);
    for(UINT k=0; k<numClasses; k++){
        classLikelihoods[k] = classDistances[k] * classNorm;

        if( classLikelihoods[k] > maxLikelihood ){
            maxLikelihood = classLikelihoods[k];
            bestDistance = classDistances[k];
            bestIndex = k;
        }
    }

    predictedClassLabel = classLabels[ bestIndex ];

    return true;
}
bool LDA::predict(VectorDouble inputVector){

    if( !trained ){
        errorLog << "predict(vector< Float > inputVector) - LDA Model Not Trained!" << std::endl;
        return false;
    }

    predictedClassLabel = 0;

    // NOTE: the original code had a redundant second `if( !trained )` check and a dead
    // store of maxLikelihood = -10000 that was immediately overwritten; both removed.

    if( inputVector.getSize() != numInputDimensions ){
        // BUGFIX: the original message never closed the parenthesis around numInputDimensions
        errorLog << "predict(vector< Float > inputVector) - The size of the input vector (" << inputVector.getSize() << ") does not match the num features in the model (" << numInputDimensions << ")!" << std::endl;
        return false;
    }

    // Make sure the likelihoods and distances vectors have been assigned
    if( classLikelihoods.getSize() != numClasses || classDistances.getSize() != numClasses ){
        classLikelihoods.resize(numClasses);
        classDistances.resize(numClasses);
    }

    // Compute the linear discriminant score for each class:
    // weights[0] is the bias term, weights[1..N] multiply the input features
    bestDistance = 0;
    maxLikelihood = 0;
    UINT bestIndex = 0;
    Float sum = 0;
    for(UINT k=0; k<numClasses; k++){

        for(UINT j=0; j<numInputDimensions+1; j++){
            if( j==0 ) classDistances[k] = models[k].weights[j];
            else classDistances[k] += inputVector[j-1] * models[k].weights[j];
        }
        // Softmax-style likelihood: exp of the score, normalized below
        classLikelihoods[k] = exp( classDistances[k] );
        sum += classLikelihoods[k];

        if( classLikelihoods[k] > maxLikelihood ){
            bestIndex = k;
            maxLikelihood = classLikelihoods[k];
        }
    }

    // Normalize the likelihoods so they sum to 1 (sum > 0 since exp is always positive)
    for(UINT k=0; k<numClasses; k++){
        classLikelihoods[k] /= sum;
    }
    maxLikelihood = classLikelihoods[ bestIndex ];

    predictedClassLabel = models[ bestIndex ].classLabel;

    return true;
}
bool ClassLabelChangeFilter::process(const VectorDouble &inputVector){ if( !initialized ){ errorLog << "process(const VectorDouble &inputVector) - Not initialized!" << std::endl; return false; } if( inputVector.getSize() != numInputDimensions ){ errorLog << "process(const VectorDouble &inputVector) - The size of the inputVector (" << inputVector.getSize() << ") does not match that of the filter (" << numInputDimensions << ")!" << std::endl; return false; } //Use only the first value (as that is the predicted class label) processedData[0] = filter( (UINT)inputVector[0] ); return true; }