double LinearFA::value(const std::vector<double>& parameters,const std::vector<double>& observations) { vector<double> features = computeFeatures ( observations ); return inner_product(parameters.begin(),parameters.end(),features.begin(),0.); }
// Trains the texton model: samples dense features from the training images,
// whitens them (zero mean, unit variance in every direction), and clusters
// the whitened samples with k-means into N_ texton centers.
//
// lab_images : training images (Lab color space, presumably — TODO confirm)
// names      : identifiers parallel to lab_images
// n_samples  : number of feature samples requested per image
void Texton::train(const QVector< Image< float > >& lab_images, const QVector<QString> & names, int n_samples ) {
	QVector< float > features;
	int feature_size = feature_->size();
	MatrixXd covariance;
	// Sample raw features; also fills in their mean (mean_) and covariance.
	computeFeatures( features, mean_, covariance, feature_, lab_images, names, n_samples );
	// Features are stored flat: n_features consecutive chunks of feature_size floats.
	int n_features = features.count() / feature_size;
	// Do proper whitening: http://en.wikipedia.org/wiki/White_noise#Whitening_a_random_vector
	// // No Whitening
	// transformation_ = MatrixXd::Identity( feature_size, feature_size );
	// // Simple Whitening
	// transformation_ = covariance.diagonal().array().inverse().sqrt().matrix().asDiagonal();
	// True Whitening: T = S^{-1/2} V^T from the SVD of the covariance matrix.
	JacobiSVD< MatrixXd > svd( covariance, ComputeThinU | ComputeThinV );
	transformation_ = svd.singularValues().array().inverse().sqrt().matrix().asDiagonal() * svd.matrixV().transpose();
	// Whitening (zero mean, 1 stddev): transform every sample in place.
	// k walks the flat output position, kk the flat read position of sample i.
	for( int i=0, k=0; i<n_features; i++ ){
		VectorXd x( feature_size );
		for( int j=0,kk=k; j<feature_size; j++, kk++ )
			x[j] = features[kk];
		x = transformation_*(x-mean_);
		for( int j=0; j<feature_size; j++, k++ )
			features[k] = x[j];
	}
	// Train the texton dictionary (k-means with N_ clusters).
	kmeans_.train( features.data(), feature_size, n_features, N_ );
}
// check compute features interface method TEST(TEST_CASE_NAME, compute_features) { auto ptr = create_brisque(); cv::Mat features; ptr->computeFeatures(get_testfile_1a(), features); EXPECT_EQ(features.rows, 1); EXPECT_EQ(features.cols, 36); }
/**
 * Loads an animation resource from an XML file inside the game package.
 *
 * Reads the XML data via the package manager, parses it, and pre-caches all
 * frames. On success _valid ends up true; any failure reports through
 * error() and leaves _valid false.
 */
AnimationResource::AnimationResource(const Common::String &filename) :
		Resource(filename, Resource::TYPE_ANIMATION),
		Common::XMLParser(),
		_valid(false) {
	// Get a pointer to the package manager
	_pPackage = Kernel::getInstance()->getPackage();
	assert(_pPackage);

	// Switch to the folder the specified XML file is in
	Common::String oldDirectory = _pPackage->getCurrentDirectory();
	if (getFileName().contains('/')) {
		Common::String dir = Common::String(getFileName().c_str(), strrchr(getFileName().c_str(), '/'));
		_pPackage->changeDirectory(dir);
	}

	// Load the contents of the file
	uint fileSize;
	char *xmlData = _pPackage->getXmlFile(getFileName(), &fileSize);
	if (!xmlData) {
		error("Could not read \"%s\".", getFileName().c_str());
		return;
	}

	// Parse the contents
	if (!loadBuffer((const byte *)xmlData, fileSize)) {
		// BUGFIX: xmlData was leaked on this early-exit path; it was only
		// freed after a successful parse.
		free(xmlData);
		// NOTE(review): the working directory is not restored on the early
		// returns in this constructor — confirm whether callers rely on that.
		return;
	}

	_valid = parse();
	close();
	free(xmlData);

	// Switch back to the previous folder
	_pPackage->changeDirectory(oldDirectory);

	// Give an error message if there weren't any frames specified
	if (_frames.empty()) {
		error("\"%s\" does not have any frames.", getFileName().c_str());
		return;
	}

	// Pre-cache all the frames
	if (!precacheAllFrames()) {
		error("Could not precache all frames of \"%s\".", getFileName().c_str());
		return;
	}

	// Post processing to compute animation features
	if (!computeFeatures()) {
		error("Could not determine the features of \"%s\".", getFileName().c_str());
		return;
	}

	_valid = true;
}
// TODO Enum for is_test_phase PointCloud<Signature>::Ptr Detector::obtainFeatures(Scene &scene, PointCloud<PointNormal>::Ptr keypoints, bool is_test_phase, bool cache) { if (cache == false) { PointCloud<Signature>::Ptr features = computeFeatures(scene.cloud, keypoints); return features; } else { std::string name_str = std::string(feature_est_->name_) + scene.id; if (is_test_phase) { name_str += "_test"; } else { name_str += "_train"; } name_str += ".pcd"; const char *name = name_str.c_str(); if (ifstream(name)) { PointCloud<Signature>::Ptr features (new PointCloud<Signature>()); PCDReader r; r.readEigen(std::string(name), *features); //*features = *tmp; //io::loadPCDFile(name, *features); if (features->size() != keypoints->size()) assert(false); cout << "got " << features->size() << " features from " << keypoints->size() << " points" << endl; return features; } else { PointCloud<Signature>::Ptr features = computeFeatures(scene.cloud, keypoints); PCDWriter w; w.writeASCIIEigen(std::string(name), *features); return features; } } }
void SegmentPairEndFeatureExtractor::updateOutputs(){ LOG_DEBUG(segmentpairendfeatureextractorlog) << "extracting features" << std::endl; unsigned int numSegmentPairEndFeatures = 5; _features->clear(); _features->resize(_segments->size(), numSegmentPairEndFeatures); _features->addName("spe is segment pair end"); _features->addName("spe area of end segment"); _features->addName("spe change of area"); _features->addName("spe change of area / area_cts"); _features->addName("spe change of area / area_end"); foreach (boost::shared_ptr<EndSegment> segment, _segments->getEnds()) computeFeatures(segment, _features->get(segment->getId())); foreach (boost::shared_ptr<ContinuationSegment> segment, _segments->getContinuations()) computeFeatures(segment, _features->get(segment->getId())); foreach (boost::shared_ptr<BranchSegment> segment, _segments->getBranches()) computeFeatures(segment, _features->get(segment->getId())); foreach (boost::shared_ptr<SegmentPair> segment, _segments->getSegmentPairs()) computeFeatures(segment, _features->get(segment->getId())); foreach (boost::shared_ptr<SegmentPairEnd> segment, _segments->getSegmentPairEnds()) computeFeatures(segment, _features->get(segment->getId())); LOG_ALL(segmentpairendfeatureextractorlog) << "found features: " << *_features << std::endl; LOG_DEBUG(segmentpairendfeatureextractorlog) << "done" << std::endl; }
/*!
	Constructor: configures the HOG window from the given config, gathers
	the training samples, computes their descriptors and trains the
	detector, printing the elapsed CPU time in seconds.
*/
Train::Train(Config config) {
	clock_t start, finish;
	start = clock();

	this->conf = config;
	hog.winSize = cv::Size(config.hog_wsize, config.hog_hsize);
	positives = 0;
	negatives = 0;

	getSamples();
	computeFeatures();
	//prepareData();
	trainWithDescriptors();

	finish = clock();
	// BUGFIX: (finish - start) / CLOCKS_PER_SEC was an integer division,
	// silently truncating the reported time to whole seconds.
	std::cout << "Tempo utilizado (segundos): "
	          << static_cast<double>(finish - start) / CLOCKS_PER_SEC << std::endl;
}
bool StompOptimizationTask::execute(std::vector<Eigen::VectorXd>& parameters, std::vector<Eigen::VectorXd>& projected_parameters, Eigen::VectorXd& costs, Eigen::MatrixXd& weighted_feature_values, const int iteration_number, const int rollout_number, int thread_id, bool compute_gradients, std::vector<Eigen::VectorXd>& gradients, bool& validity) { int rollout_id = num_rollouts_; if (rollout_number >= 0) { rollout_id = rollout_number; last_executed_rollout_ = rollout_number; } computeFeatures(parameters, rollout_id, validity); computeCosts(trajectories_[rollout_id]->features_, costs, weighted_feature_values); return true; }
/**
 * Sets up the 2nd-stage image data: configures the 1st-stage data, records
 * the per-image file paths, and (when bGenerateFeatures is set) generates
 * the HOT1 feature images from the 1st-stage segmentation maps, optionally
 * with integral images, writing each image's data to disk as it goes.
 * Returns false on any read/write failure.
 */
bool ImageDataFloat2ndStageBinaryCombined::setConfiguration(ConfigReader &cfg)
{
	vector<string>::iterator it, end;
	unsigned int iImg, iFeature;
	cv::Mat imgSegmap1stStage, imgLabel;
	CImageCacheElement *pImgElem;
	char strPostfix[100];
	string strInputLabelPath;

	// iNbMaxImagesLoaded = 10;
	// bUseIntegralImages = true;

	// The 2nd stage only needs paths from the 1st stage, not its features.
	pData1stStage = new ImageDataFloat();
	pData1stStage->bGenerateFeatures = false;
	pData1stStage->setConfiguration(cfg);

	it = cfg.imageFilenames.begin();
	end = cfg.imageFilenames.end();
	vectImageData.resize(end-it);

	#ifndef SHUT_UP
	if (bGenerateFeatures==true)
		cout << "Set paths and generate HOT1 features for " << end-it << " images: "<<endl;
	else
		cout << "Just set paths for " << end-it << " images: "<<endl;
	#endif

	// 2nd-stage features are the 1st-stage features plus one per label.
	iNbLabels = cfg.numLabels;
	iNbFeatures = pData1stStage->getNbFeatures() + iNbLabels;
	iWidth = pData1stStage->getWidth();
	iHeight = pData1stStage->getHeight();

	// load image data
	for(iImg = 0; it != end; ++it, ++iImg)
	{
		pImgElem = &(vectImageData[iImg]);
		sprintf(strPostfix, "%04d", iImg);

		pImgElem->strInputImage = cfg.imageFolder + "/" + *it;
		pImgElem->strFeatureImagesPath = cfg.feature2ndStageFolder + "/features" + strPostfix;
		pImgElem->strFeatureImagesIntegralPath = cfg.feature2ndStageFolder + "/features_integral" + strPostfix;
		pImgElem->strLabelImagePath = cfg.outputFolder + "/segmap_1st_stage" + strPostfix + ".png";

		if (bGenerateFeatures==true)
		{
			cout<<"Generating 2nd-stage HOT1 features for image "<<iImg<<endl;

			imgSegmap1stStage = cv::imread(pImgElem->strLabelImagePath, cv::IMREAD_GRAYSCALE);
			if (imgSegmap1stStage.data==NULL)
			{
				cout<<"Failed to read 1st-stage segmentation map "<<iImg<<": "<<pImgElem->strLabelImagePath<<endl;
				return false;
			}

			// Check if segmentation map and ground truth image have the same size
			strInputLabelPath = cfg.groundTruthFolder + "/label_rearranged" + strPostfix + ".png";
			imgLabel = cv::imread(strInputLabelPath, cv::IMREAD_GRAYSCALE);
			if (imgLabel.data==NULL)
			{
				// BUGFIX: this message used to print pImgElem->strLabelImagePath
				// (the segmentation map), but the file that failed to load is the
				// ground truth image at strInputLabelPath.
				cout<<"Failed to read ground truth image "<<iImg<<": "<<strInputLabelPath<<endl;
				return false;
			}
			if (imgSegmap1stStage.size()!=imgLabel.size())
			{
				cv::resize(imgSegmap1stStage, imgSegmap1stStage, imgLabel.size(), 0, 0, cv::INTER_NEAREST);
				cout<<"Segmentation map "<<iImg<<" resized"<<endl;
			}

			computeFeatures(imgSegmap1stStage, pImgElem->vectFeatures);

			if (bUseIntegralImages==true)
			{
				pImgElem->vectFeaturesIntegral.resize(pImgElem->vectFeatures.size());
				for (iFeature=0; iFeature<pImgElem->vectFeatures.size(); iFeature++)
				{
					cv::integral(pImgElem->vectFeatures[iFeature], pImgElem->vectFeaturesIntegral[iFeature], CV_32F);
				}
			}

			pImgElem->bLoaded = true;

			// Persist this image's features and drop them from memory.
			if (WriteImageData(iImg)==false)
				return false;
			CloseImageData(iImg);
		}
		else {
			pImgElem->bLoaded = false;
		}
	}

	cout<<"2nd-stage image data initialized"<<endl;

	return true;
}
// Fills the feature row of the given segment (looked up by its id in the
// feature container) with its geometry features.
void GeometryFeatureExtractor::getFeatures(const SegmentType& segment) { computeFeatures(segment, _features->get(segment.getId())); }