void ClassifierMLP::Train(std::vector< fvec > samples, ivec labels)
{
    u32 sampleCnt = samples.size();
    if(!sampleCnt) return;
    DEL(mlp);
    dim = samples[0].size();

    // Layer-size column vector: input layer (dim), optional hidden layers, one output.
    CvMat *layers;
    // if(neuronCount == 3) neuronCount = 2; // don't ask me why but 3 neurons mess up everything...
    if(!layerCount || neuronCount < 2)
    {
        // No hidden layer: a plain input -> single-output perceptron.
        layers = cvCreateMat(2, 1, CV_32SC1);
        cvSet1D(layers, 0, cvScalar(dim));
        cvSet1D(layers, 1, cvScalar(1));
    }
    else
    {
        layers = cvCreateMat(2 + layerCount, 1, CV_32SC1);
        cvSet1D(layers, 0, cvScalar(dim));
        cvSet1D(layers, layerCount + 1, cvScalar(1));
        FOR(i, layerCount) cvSet1D(layers, i + 1, cvScalar(neuronCount));
    }

    // Copy the samples into OpenCV matrices in randomly permuted order,
    // with uniform per-sample weights.
    u32 *perm = randPerm(sampleCnt);
    CvMat *trainSamples = cvCreateMat(sampleCnt, dim, CV_32FC1);
    CvMat *trainLabels = cvCreateMat(labels.size(), 1, CV_32FC1);
    CvMat *sampleWeights = cvCreateMat(samples.size(), 1, CV_32FC1);
    FOR(i, sampleCnt)
    {
        FOR(d, dim) cvSetReal2D(trainSamples, i, d, samples[perm[i]][d]);
        cvSet1D(trainLabels, i, cvScalar(labels[perm[i]]));
        cvSet1D(sampleWeights, i, cvScalar(1));
    }
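    // The original snippet breaks off here, before the network is actually
    // trained. A minimal sketch of the likely remainder, assuming the legacy
    // OpenCV CvANN_MLP API; the activation choice and training parameters
    // below are illustrative assumptions, not the original values.
    delete [] perm; // randPerm() is assumed to allocate with new[]

    mlp = new CvANN_MLP();
    mlp->create(layers, CvANN_MLP::SIGMOID_SYM, 1, 1);

    CvANN_MLP_TrainParams params;
    params.term_crit = cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 1000, 0.001);
    params.train_method = CvANN_MLP_TrainParams::BACKPROP;
    mlp->train(trainSamples, trainLabels, sampleWeights, 0, params);

    cvReleaseMat(&trainSamples);
    cvReleaseMat(&trainLabels);
    cvReleaseMat(&sampleWeights);
    cvReleaseMat(&layers);
}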
void Cas1DVanishingPoint::findInteriorVanishingPt(const std::vector<cv::Vec4i> &lines,
                                                  float &intVPTheta, float &intVPRho) const
{
    float best_theta = 0.0f, best_rho = 0.0f; // defined even if no consensus is found
    if (lines.size() < 2)
    {
        intVPTheta = best_theta;
        intVPRho = best_rho;
        return;
    }

    // Adaptive RANSAC: p is the desired probability of drawing at least one
    // all-inlier pair, r the estimated inlier ratio; with a sample size of 2,
    // k = log(1 - p) / log(1 - r^2) draws are needed.
    float p = 0.995f;
    float r = 2.0f / lines.size();
    float k = log(1.0f - p) / log(1.0f - r * r);
    size_t max_iter = 1000;
    size_t it = 0;
    size_t max_inliers = 2;

    while (it < k && it < max_iter)
    {
        // Hypothesize a vanishing point from the intersection of two random lines.
        std::vector<size_t> perm = randPerm(lines.size());
        std::vector<cv::Vec4i> samples;
        samples.push_back(lines[perm[0]]);
        samples.push_back(lines[perm[1]]);
        cv::Point2f guess = convergeLines(samples);

        float th = atan2(guess.y, guess.x);
        float rh = hypot(guess.x, guess.y);
        if (rh > mInteriorRadius)
        {
            it++; // count rejected hypotheses too, or the loop may never terminate
            continue;
        }

        size_t inliers = linesSupport(th, rh, lines).size();
        if (inliers > max_inliers)
        {
            max_inliers = inliers;
            best_theta = th;
            best_rho = rh;
            // Re-estimate the inlier ratio and restart the iteration budget.
            r = (float)max_inliers / lines.size();
            k = log(1.0f - p) / log(1.0f - r * r);
            it = 0;
        }
        it++;
    }
    intVPTheta = best_theta;
    intVPRho = best_rho;
}
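// For reference, the bound used above is the standard RANSAC iteration count:
// if each draw of s elements is all-inlier with probability w^s, then after k
// draws the failure probability is (1 - w^s)^k, and requiring it to stay below
// 1 - p gives k = log(1 - p) / log(1 - w^s). A minimal standalone sketch;
// ransacIterations is a hypothetical helper, not part of the original class.
#include <cmath>

static float ransacIterations(float p, float w, int sampleSize)
{
    float allInlierProb = std::pow(w, sampleSize); // one draw is all inliers
    return std::log(1.0f - p) / std::log(1.0f - allInlierProb);
}
// e.g. ransacIterations(0.995f, 0.5f, 2) is roughly 18.4 draws.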
void FilterBoostLearner::filter( InputData* pData, int size, bool rejection )
{
    pData->clearIndexSet();

    const int numExamples = pData->getNumExamples();
    const int numClasses = pData->getNumClasses();
    set<int> indexSet;

    if ( size < numExamples ) // only a subset of the dataset will be used
    {
        // Random permutation: sort the indices by random keys.
        vector< pair<int,int> > tmpRandomArr( numExamples );
        for ( int i = 0; i < numExamples; i++ )
        {
            tmpRandomArr[i].first = rand();
            tmpRandomArr[i].second = i;
        }
        sort( tmpRandomArr.begin(), tmpRandomArr.end(),
              nor_utils::comparePair<1, int, int, less<int> >() );

        vector<int> randPerm( numExamples );
        for ( int i = 0; i < numExamples; i++ )
            randPerm[i] = tmpRandomArr[i].second;

        int iter = 0;
        int maxIter = 5 * size; // note: currently unused; wholeIter bounds the passes instead
        int wholeIter = 0;
        indexSet.clear();

        // Draw examples until the requested sample size is reached.
        while (1)
        {
            if ( (int)indexSet.size() >= size ) break;

            // After five full passes, fall back to plain (non-rejection) sampling.
            if ( wholeIter > 5 ) rejection = false;

            if ( numExamples <= iter )
            {
                iter = 0;
                wholeIter++;
            }

            if ( rejection )
            {
                // Accept the example with probability proportional to its
                // FilterBoost weight: q = (1/K) * sum_l 1 / (1 + exp(margin_l)).
                const vector<Label>& labels = pData->getLabels( randPerm[iter] );
                vector<Label>::const_iterator lIt;

                AlphaReal scalar = 0.0;
                for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
                    scalar += 1 / ( 1 + exp( _margins[ randPerm[iter] ][lIt->idx] ) );

                AlphaReal qValue = scalar / (AlphaReal)numClasses;
                AlphaReal randNum = (AlphaReal)rand() / RAND_MAX;

                if ( randNum < qValue )
                    indexSet.insert( randPerm[iter] );
            }
            else
            {
                indexSet.insert( randPerm[iter] );
            }
            iter++;
        }

        // Set each label weight of the examples in use to 1/(1+exp(margin))
        // and normalize so the weights sum to one.
        set<int>::iterator sIt;
        AlphaReal sum = 0.0;
        for ( sIt = indexSet.begin(); sIt != indexSet.end(); ++sIt )
        {
            vector<Label>& labels = pData->getLabels( *sIt );
            vector<Label>::iterator lIt;
            for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
            {
                lIt->weight = 1 / ( 1 + exp( _margins[ *sIt ][lIt->idx] ) );
                sum += lIt->weight;
            }
        }
        for ( sIt = indexSet.begin(); sIt != indexSet.end(); ++sIt )
        {
            vector<Label>& labels = pData->getLabels( *sIt );
            vector<Label>::iterator lIt;
            for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
                lIt->weight /= sum;
        }

        pData->loadIndexSet( indexSet );
    }
    else // the whole dataset is used
    {
        AlphaReal sum = 0.0;
        for ( int i = 0; i < pData->getNumExamples(); ++i )
        {
            vector<Label>& labels = pData->getLabels( i );
            vector<Label>::iterator lIt;
            for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
            {
                lIt->weight = 1 / ( 1 + exp( _margins[ i ][lIt->idx] ) );
                sum += lIt->weight;
            }
        }
        for ( int i = 0; i < pData->getNumExamples(); ++i )
        {
            vector<Label>& labels = pData->getLabels( i );
            vector<Label>::iterator lIt;
            for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
                lIt->weight /= sum;
        }
    }
}
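// For reference, both the acceptance probability and the label weights above
// come from the same FilterBoost edge weight w = 1 / (1 + exp(margin)): an
// example that is already classified with a large positive margin is rarely
// kept, while a misclassified one (negative margin) almost always is. A
// minimal standalone sketch; filterWeight and normalizedWeights are
// hypothetical helpers, not part of the original learner.
#include <cmath>
#include <numeric>
#include <vector>

static double filterWeight(double margin)
{
    return 1.0 / (1.0 + std::exp(margin));
}

// Usage: turn a set of margins into weights summing to one, mirroring the
// two-pass normalization performed in filter().
static std::vector<double> normalizedWeights(const std::vector<double>& margins)
{
    std::vector<double> w(margins.size());
    for (size_t i = 0; i < margins.size(); ++i)
        w[i] = filterWeight(margins[i]);
    double sum = std::accumulate(w.begin(), w.end(), 0.0);
    for (size_t i = 0; i < w.size(); ++i)
        w[i] /= sum;
    return w;
}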