/// Prepare per-sample output storage for a run over `numSamples` samples:
/// size the confidence matrix to numSamples x numClasses, size the
/// prediction vector, and reset every confidence entry to zero.
void RandomNaiveBayes::initialize(const int numSamples) {
    m_confidences.resize(numSamples, m_hp.numClasses);
    m_predictions.resize(numSamples);

    // Explicitly zero the whole confidence matrix (resize alone does not
    // guarantee cleared contents).
    for (int sample = 0; sample < numSamples; ++sample) {
        for (int cls = 0; cls < m_hp.numClasses; ++cls) {
            m_confidences(sample, cls) = 0.0;
        }
    }
}
/// Prepare per-sample output storage for a run over `numSamples` samples:
/// size the confidence matrix to numSamples x numClasses, size the
/// prediction vector, and reset every confidence entry to zero.
void Forest::initialize(const long int numSamples) {
    m_confidences.resize(numSamples, m_hp.numClasses);
    m_predictions.resize(numSamples);

    // Explicitly zero the whole confidence matrix (resize alone does not
    // guarantee cleared contents).
    for (long int sample = 0; sample < numSamples; ++sample) {
        for (int cls = 0; cls < m_hp.numClasses; ++cls) {
            m_confidences(sample, cls) = 0;
        }
    }
}
// Evaluate the ensemble on `data`: resize/zero the confidence matrix, then let
// each member classifier accumulate its votes into m_confidences via
// NaiveBayes::eval(data, m_confidences).
//
// NOTE(review): this function is truncated in the visible chunk — its closing
// brace (and any code that turns accumulated confidences into m_predictions,
// presumably using `labels`) lies outside this view. `labels` is not used in
// the visible portion; confirm against the full file.
//
// NOTE(review): the comment below says "init to one (due to multiplication)"
// but the loop initializes to 0.0 — either the comment is stale or the member
// classifiers add rather than multiply; verify against NaiveBayes::eval.
void RandomNaiveBayes::eval(const matrix<float>& data, const std::vector<int>& labels) { m_confidences.resize(data.size1(), m_hp.numClasses); m_predictions.resize(data.size1()); // init to one (due to multiplication) for (int sample = 0; sample < (int)data.size1(); sample++) { for ( int c = 0; c < m_hp.numClasses; c++) { m_confidences(sample,c) = 0.0; } } BOOST_FOREACH(NaiveBayes::Ptr nbc, m_naiveBayesClassifiers) { nbc->eval(data, m_confidences); }
// Estimate the 2D disparity (y, x offset) between the stored mean Gabor jet
// statistics and the given jet, by iteratively solving d = Gamma^{-1} * Phi.
//
// Preconditions (checked): the Gabor wavelet transform must have been set, and
// its wavelet count must match the jet length; otherwise std::runtime_error.
//
// Method (coarse-to-fine): the jet is traversed BACKWARDS, from the highest
// scale index (lowest frequency) to the lowest, one scale (all directions) at
// a time. After each scale, the 2x2 Gamma matrix and 2-vector Phi accumulated
// so far are solved for an updated disparity estimate; that running estimate
// is used at the next (higher-frequency) scale to unwrap phase via the
// cycle count n = round((diff - d.k) / 2*pi).
//
// Per-wavelet weights: conf = meanAbs * jet abs, diff = meanPhase - jet phase,
// var = stored phase variance (each term is divided by var).
//
// NOTE(review): gamma_det is not checked against zero before the divisions at
// the end of each scale; a degenerate Gamma yields inf/nan — presumably ruled
// out by the wavelet geometry, but confirm.
//
// NOTE(review): "has not been set jet" in the error message reads like a typo
// for "yet", but it is a runtime string — left untouched here.
blitz::TinyVector<double,2> bob::ip::gabor::JetStatistics::disparity(const boost::shared_ptr<bob::ip::gabor::Jet> jet) const{ if (!m_gwt) throw std::runtime_error("The Gabor wavelet transform class has not been set jet"); if (m_gwt->numberOfWavelets() != jet->length()) throw std::runtime_error((boost::format("The given Gabor jet is of length %d, but the transform has %d wavelets; forgot to set your custom Transform") % jet->length() % m_gwt->numberOfWavelets()).str()); // compute confidences and phase differences once m_confidences.resize(m_meanAbs.shape()); m_phaseDifferences.resize(m_meanPhase.shape()); m_confidences = m_meanAbs * jet->abs(); m_phaseDifferences = m_meanPhase - jet->phase(); double gamma_y_y = 0., gamma_y_x = 0., gamma_x_x = 0., phi_y = 0., phi_x = 0.; blitz::TinyVector<double,2> disparity(0., 0.); auto kernels = m_gwt->waveletFrequencies(); // iterate through the Gabor jet **backwards** (from highest scale to lowest scale) for (int j = jet->length()-1, scale = m_gwt->numberOfScales(); scale--;){ for (int direction = m_gwt->numberOfDirections(); direction--; --j){ const double kjy = kernels[j][0], kjx = kernels[j][1]; const double conf = m_confidences(j), diff = m_phaseDifferences(j), var = m_varPhase(j); // totalize Gamma matrix gamma_y_y += conf * kjy * kjy / var; gamma_y_x += conf * kjy * kjx / var; gamma_x_x += conf * kjx * kjx / var; // totalize phi vector // estimate the number of cycles that we are off (using the current estimation of the disparity double n = round((diff - disparity[0] * kjy - disparity[1] * kjx) / (2.*M_PI)); // totalize corrected phi vector elements phi_y += conf * (diff - n * 2. * M_PI) * kjy / var; phi_x += conf * (diff - n * 2. 
* M_PI) * kjx / var; } // re-calculate disparity as d=\Gamma^{-1}\Phi of the (low frequency) wavelet scales that we used up to now double gamma_det = gamma_x_x * gamma_y_y - sqr(gamma_y_x); disparity[0] = (gamma_x_x * phi_y - gamma_y_x * phi_x) / gamma_det; disparity[1] = (gamma_y_y * phi_x - gamma_y_x * phi_y) / gamma_det; } return disparity; }