bool StrongClassifier::decide(const FeatureVector &featureVector) const {
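  // Copy the incoming FeatureVector into a plain std::vector<float> and
  // delegate to the overload that operates on raw float features.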
  std::vector<float> features(featureVector.size());
  for (std::size_t i = 0; i < featureVector.size(); ++i) {
    features[i] = featureVector.at(i);
  }

  return decide(features);
}
Example #2
double Slic::_ComputeDistance(const FeatureVector &vec1, const FeatureVector &vec2)
{
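    // Accumulates the *squared* Euclidean distance between the two feature vectors
    // (no sqrt), which is sufficient when only relative distances are compared.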
    assert(vec1.size() == vec2.size());
    double dis = 0;
    for (unsigned int i=0;i<vec1.size();++i)
    {
        dis += (vec1[i]-vec2[i])*(vec1[i]-vec2[i]);
    }
    return dis;
}
Example #3
  inline
  Tp1 dot_product(const FeatureVector<Tp1, Alloc1>& x, const WeightVector<Tp, Alloc>& w, const FeatureVector<Tp2, Alloc2>& y)
  {
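    // Weighted dot product of two feature vectors: return a default Tp1 for empty input,
    // short-circuit when x and y are the same object, and otherwise iterate over the
    // shorter of the two vectors.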
    typedef FeatureVector<Tp1, Alloc1> feature_vector1_type;
    typedef WeightVector<Tp, Alloc>    weight_vector_type;
    typedef FeatureVector<Tp2, Alloc2> feature_vector2_type;
    
    if (x.empty() || y.empty()) return Tp1();
    
    if (static_cast<const void*>(&x) == static_cast<const void*>(&y))
      return details::__inner_product(x.begin(), x.end(), w, Tp1());
    
    if (x.size() < y.size())
      return dot_product(x.begin(), x.end(), w, y, Tp1());
    else
      return dot_product(x, w, y.begin(), y.end(), Tp1());
  }
Example #4
 inline
 Tp1 dot_product(const FeatureVector<Tp1, Alloc1>& x, const FeatureVector<Tp2, Alloc2>& y)
 {
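   // Unweighted dot product of two feature vectors; the dispatch mirrors the
   // weighted overload: empty check, identical-object shortcut, then iterate
   // over the shorter vector.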
   typedef FeatureVector<Tp1, Alloc1> feature_vector1_type;
   typedef FeatureVector<Tp2, Alloc2> feature_vector2_type;
   
   if (x.empty() || y.empty()) return Tp1();
   
   // if the same address, we are identical!
   if (static_cast<const void*>(&x) == static_cast<const void*>(&y))
     return details::__inner_product(x.begin(), x.end(), Tp1());
   
   if (x.size() < y.size())
     return dot_product(x.begin(), x.end(), y, Tp1());
   else
     return dot_product(x, y.begin(), y.end(), Tp1());
 }
FeatureSet FeatureSet::clone() const
{
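  // Deep-copy every FeatureVector in the set and sanity-check that each clone
  // matches the original in size, element values, and true-class-label tag.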
  FeatureSet other;
  for (FeatureSet::const_iterator i = begin(); i != end(); ++i) {
    FeatureVector cv = i->clone();
    // check that all data is the same:
    if (cv.size() != i->size())
      throw std::runtime_error("FeatureSet::clone: cloned FeatureVector has a different size");
    for (unsigned int idx=0; idx<cv.size(); ++idx) {
      if (cv[idx] != (*i)[idx]) {
        cout << "(" << cv[idx] << "!=" << (*i)[idx] << ")" << flush;
        throw std::runtime_error("FeatureSet::clone: cloned FeatureVector element differs from the original");
      }
    }
    if (cv.getTag<TagTrueClassLabel>().label != i->getTag<TagTrueClassLabel>().label) {
      cout << "(" << cv.getTag<TagTrueClassLabel>().label << "!=" << i->getTag<TagTrueClassLabel>().label << ")" << flush;
      throw std::runtime_error("FeatureSet::clone: cloned FeatureVector has a different class label tag");
    }
    other.push_back(cv);
  }
  return other;
}
PERIPHERAL_ERROR GetFeatures(const JOYSTICK_INFO* joystick, const char* controller_id,
                             unsigned int* feature_count, JOYSTICK_FEATURE** features)
{
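  // Validate the C-API arguments, look up the features stored for this joystick
  // and controller profile, and convert them to JOYSTICK_FEATURE structs for the caller.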
  if (!joystick || !controller_id || !feature_count || !features)
    return PERIPHERAL_ERROR_INVALID_PARAMETERS;

  FeatureVector featureVector;
  if (CStorageManager::Get().GetFeatures(ADDON::Joystick(*joystick), controller_id, featureVector))
  {
    *feature_count = featureVector.size();
    ADDON::JoystickFeatures::ToStructs(featureVector, features);
    return PERIPHERAL_NO_ERROR;
  }

  return PERIPHERAL_ERROR_FAILED;
}
Example #7
int main(int argc, char** argv)
{
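  // Read 3-D points from the file named on the command line, cluster them with
  // k-means (K = 3), print the cluster centers, and verify that the membership
  // pattern of the first K points repeats throughout the data set.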
  static const size_t K = 3;
  
  typedef double Scalar;
  typedef Eigen::Matrix<Scalar, 1, K> Feature;
  typedef std::vector<Feature, Eigen::aligned_allocator<Feature> > FeatureVector;
  FeatureVector features(1000*K);
  FeatureVector centers;
  std::vector<unsigned int> membership;

  FILE* data = fopen(argv[1], "r");
  if (!data) {
    fprintf(stderr, "could not open %s\n", argv[1]);
    return -1;
  }
  for (size_t i = 0; i < features.size(); ++i) {
    Feature& f = features[i];
    if (fscanf(data, "%lf %lf %lf", &f[0], &f[1], &f[2]) != 3)
      break;  // stop on short or malformed input
  }
  fclose(data);

  vt::SimpleKmeans<Feature> kmeans(Feature::Zero());
  double sse = kmeans.cluster(features, K, centers, membership);

  for (size_t i = 0; i < centers.size(); ++i) {
    printf("%f %f %f\n", centers[i][0], centers[i][1], centers[i][2]);
  }
  printf("sse = %f\n", sse);

  unsigned int pattern[K];
  pattern[0] = membership[0];
  pattern[1] = membership[1];
  pattern[2] = membership[2];
  for (size_t i = 0; i < membership.size(); i+=K) {
    if (membership[i] != pattern[0] ||
        membership[i+1] != pattern[1] ||
        membership[i+2] != pattern[2]) {
      printf("Misassignment!\n");
      return -1;
    }
  }
}
void ClassifySvmSharedCommand::processSharedAndDesignData(vector<SharedRAbundVector*> lookup) {
    try {
        OutputFilter outputFilter(verbosity);

        LabeledObservationVector labeledObservationVector;
        FeatureVector featureVector;
        readSharedRAbundVectors(lookup, designMap, labeledObservationVector, featureVector);

        // optionally remove features with low standard deviation
        if ( stdthreshold > 0.0 ) {
            FeatureVector removedFeatureVector = applyStdThreshold(stdthreshold, labeledObservationVector, featureVector);
            if (removedFeatureVector.size() > 0) {
                std::cout << removedFeatureVector.size() << " OTUs were below the stdthreshold of " << stdthreshold << " and were removed" << std::endl;
                if ( outputFilter.debug() ) {
                    std::cout << "the following OTUs were below the standard deviation threshold of " << stdthreshold << std::endl;
                    for (FeatureVector::iterator i = removedFeatureVector.begin(); i != removedFeatureVector.end(); i++) {
                        std::cout << "  " << i->getFeatureLabel() << std::endl;
                    }
                }
            }
        }

        // apply the selected transformation: scale to [0,1] or standardize to zero mean and unit variance
        if ( transformName == "zeroone") {
            std::cout << "transforming data to lie within range [0,1]" << std::endl;
            transformZeroOne(labeledObservationVector);
        }
        else {
            std::cout << "transforming data to have zero mean and unit variance" << std::endl;
            transformZeroMeanUnitVariance(labeledObservationVector);
        }

        SvmDataset svmDataset(labeledObservationVector, featureVector);

        OneVsOneMultiClassSvmTrainer trainer(svmDataset, evaluationFoldCount, trainingFoldCount, *this, outputFilter);

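        // In "rfe" mode, rank OTUs by SVM recursive feature elimination and write the
        // ranking to the summary file; otherwise train a multi-class SVM and write the
        // actual and predicted label for every observation.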
        if ( mode == "rfe" ) {
            SvmRfe svmRfe;
            ParameterRange& linearKernelConstantRange = kernelParameterRangeMap["linear"]["constant"];
            ParameterRange& linearKernelSmoCRange = kernelParameterRangeMap["linear"]["smoc"];
            RankedFeatureList rankedFeatureList = svmRfe.getOrderedFeatureList(svmDataset, trainer, linearKernelConstantRange, linearKernelSmoCRange);

            map<string, string> variables;
            variables["[filename]"] = outputDir + m->getRootName(m->getSimpleName(sharedfile));
            variables["[distance]"] = lookup[0]->getLabel();
            string filename = getOutputFileName("summary", variables);
            outputNames.push_back(filename);
            outputTypes["summary"].push_back(filename);
            m->mothurOutEndLine();

            std::ofstream outputFile(filename.c_str());

            int n = 0;
            int rfeRoundCount = rankedFeatureList.front().getRank();
            std::cout << "ordered features:" << std::endl;
            std::cout << setw(5)  << "index"
                      << setw(12) << "OTU"
                      << setw(5)  << "rank"
                      << std::endl;
            outputFile << setw(5)  << "index"
                       << setw(12) << "OTU"
                       << setw(5)  << "rank"
                       << std::endl;
            for (RankedFeatureList::iterator i = rankedFeatureList.begin(); i != rankedFeatureList.end(); i++) {
                n++;
                int rank = rfeRoundCount - i->getRank() + 1;
                outputFile << setw(5)  << n
                           << setw(12) << i->getFeature().getFeatureLabel()
                           << setw(5)  << rank
                           << std::endl;
                if ( n <= 20 ) {
                    std::cout << setw(5) << n
                              << setw(12) << i->getFeature().getFeatureLabel()
                              << setw(5) << rank
                              << std::endl;
                }
            }
            outputFile.close();
        }
        else {
            MultiClassSVM* mcsvm = trainer.train(kernelParameterRangeMap);

            map<string, string> variables;
            variables["[filename]"] = outputDir + m->getRootName(m->getSimpleName(sharedfile));
            variables["[distance]"] = lookup[0]->getLabel();
            string filename = getOutputFileName("summary", variables);
            outputNames.push_back(filename);
            outputTypes["summary"].push_back(filename);
            m->mothurOutEndLine();

            std::ofstream outputFile(filename.c_str());

            printPerformanceSummary(mcsvm, std::cout);
            printPerformanceSummary(mcsvm, outputFile);

            outputFile << "actual  predicted" << std::endl;
            for ( LabeledObservationVector::const_iterator i = labeledObservationVector.begin(); i != labeledObservationVector.end(); i++ ) {
                Label actualLabel = i->getLabel();
                outputFile << i->getDatasetIndex() << " " << actualLabel << " ";
                try {
                    Label predictedLabel = mcsvm->classify(*(i->getObservation()));
                    outputFile << predictedLabel << std::endl;
                }
                catch ( MultiClassSvmClassificationTie& ) {
                    outputFile << "tie" << std::endl;
                    std::cout << "classification tie for observation " << i->getDatasetIndex() << " with label " << actualLabel << std::endl;
                }

            }
            outputFile.close();
            delete mcsvm;
        }

    }
    catch (exception& e) {
        m->errorOut(e, "ClassifySvmSharedCommand", "processSharedAndDesignData");
        exit(1);
    }
}
bool CButtonMapXml::Load(void)
{
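  // Parse the button map XML: the root element must contain a device element whose
  // controller child elements are each deserialized into a FeatureVector keyed by
  // controller id.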
  TiXmlDocument xmlFile;
  if (!xmlFile.LoadFile(m_strResourcePath))
  {
    esyslog("Error opening %s: %s", m_strResourcePath.c_str(), xmlFile.ErrorDesc());
    return false;
  }

  TiXmlElement* pRootElement = xmlFile.RootElement();
  if (!pRootElement || pRootElement->NoChildren() || pRootElement->ValueStr() != BUTTONMAP_XML_ROOT)
  {
    esyslog("Can't find root <%s> tag", BUTTONMAP_XML_ROOT);
    return false;
  }

  const TiXmlElement* pDevice = pRootElement->FirstChildElement(DEVICES_XML_ELEM_DEVICE);

  if (!pDevice)
  {
    esyslog("Can't find <%s> tag", DEVICES_XML_ELEM_DEVICE);
    return false;
  }

  if (!CDeviceXml::Deserialize(pDevice, m_device))
    return false;

  const TiXmlElement* pController = pDevice->FirstChildElement(BUTTONMAP_XML_ELEM_CONTROLLER);

  if (!pController)
  {
    esyslog("Device \"%s\": can't find <%s> tag", m_device.Name().c_str(), BUTTONMAP_XML_ELEM_CONTROLLER);
    return false;
  }

  // For logging purposes
  unsigned int totalFeatureCount = 0;

  while (pController)
  {
    const char* id = pController->Attribute(BUTTONMAP_XML_ATTR_CONTROLLER_ID);
    if (!id)
    {
      esyslog("Device \"%s\": <%s> tag has no attribute \"%s\"", m_device.Name().c_str(),
              BUTTONMAP_XML_ELEM_CONTROLLER, BUTTONMAP_XML_ATTR_CONTROLLER_ID);
      return false;
    }

    FeatureVector features;
    if (!Deserialize(pController, features))
      return false;

    if (features.empty())
    {
      esyslog("Device \"%s\" has no features for controller %s", m_device.Name().c_str(), id);
    }
    else
    {
      totalFeatureCount += features.size();
      m_buttonMap[id] = std::move(features);
    }

    pController = pController->NextSiblingElement(BUTTONMAP_XML_ELEM_CONTROLLER);
  }

  dsyslog("Loaded device \"%s\" with %u controller profiles and %u total features", m_device.Name().c_str(), static_cast<unsigned int>(m_buttonMap.size()), totalFeatureCount);

  return true;
}
void TrackingInfo::addFromFeatureCandidates( const FeatureVector &eyesCandidates, const FeatureVector &noseCandidates, const FeatureVector &mouthCandidates, double time, double posAffWeight, double sizeAffWeight )
{
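	// For each facial feature, pick the candidate region with the lowest (best) affinity
	// to the existing trajectory (eye candidates are split into left/right by x position);
	// if no candidate is available, the trajectory is pruned rather than extrapolated.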
	typedef std::multimap<double, const _2Real::Space2D*> PriorityMap;

	if( eyesCandidates.size() )
	{
		PriorityMap mapLeft;
		PriorityMap mapRight;

		for( FeatureVector::const_iterator it = eyesCandidates.begin(); it != eyesCandidates.end(); ++it )
		{
			_2Real::Vec2 center( ( it->getP0() + it->getP1() ) * 0.5f );
			if( center[0] > 0.5f )
			{
				if( m_eyeLeftTrajectory.canCalcAffinity() )
					mapLeft.insert( std::make_pair( m_eyeLeftTrajectory.calcAffinity( *it, posAffWeight, sizeAffWeight, 1.0, time ), &( *it ) ) );
				else
					mapLeft.insert( std::make_pair( 0.0, &( *it ) ) );
			}
			else
			{
				if( m_eyeRightTrajectory.canCalcAffinity() )
					mapRight.insert( std::make_pair( m_eyeRightTrajectory.calcAffinity( *it, posAffWeight, sizeAffWeight, 1.0, time ), &( *it ) ) );
				else
					mapRight.insert( std::make_pair( 0.0, &( *it ) ) );
			}
		}

		if( mapLeft.size() )
			m_eyeLeftTrajectory.add( *( mapLeft.begin()->second ), 0.0, time );
		else
		{
			//NOTE: there is no measure (yet?) for deciding whether it would still make sense to extrapolate the face feature, so no extrapolation is done;
			// instead, trajectories are pruned so entries don't last forever. (Duplicating the last entry would actually make more sense for face feature regions.)
			m_eyeLeftTrajectory.prune();
		}

		if( mapRight.size() )
			m_eyeRightTrajectory.add( *( mapRight.begin()->second ), 0.0, time );
		else
		{
			//NOTE: there is no measure (yet?) for deciding whether it would still make sense to extrapolate the face feature, so no extrapolation is done;
			// instead, trajectories are pruned so entries don't last forever. (Duplicating the last entry would actually make more sense for face feature regions.)
			m_eyeRightTrajectory.prune();
		}
	}
	else
	{
		//NOTE: there is no measure (yet?) for deciding whether it would still make sense to extrapolate the face feature, so no extrapolation is done;
		// instead, trajectories are pruned so entries don't last forever. (Duplicating the last entry would actually make more sense for face feature regions.)
		m_eyeLeftTrajectory.prune();
		m_eyeRightTrajectory.prune();
	}

	if( noseCandidates.size() )
	{
		double minAffinity = 0.0;
		const _2Real::Space2D *bestCandidate = NULL;

		if( !m_noseTrajectory.canCalcAffinity() )
			bestCandidate = &( noseCandidates.front() ); // not enough regions in the trajectory history to judge affinity, so just take the first candidate
		else
		{
			for( FeatureVector::const_iterator it = noseCandidates.begin(); it != noseCandidates.end(); ++it )
			{
				if( !bestCandidate )
				{
					bestCandidate = &( *it );
					minAffinity = m_noseTrajectory.calcAffinity( *it, posAffWeight, sizeAffWeight, 1.0, time );
				}
				else
				{
					double affinity = m_noseTrajectory.calcAffinity( *it, posAffWeight, sizeAffWeight, 1.0, time );
					if( affinity < minAffinity )
					{
						bestCandidate = &( *it );
						minAffinity = affinity;
					}
				}
			}
		}

		m_noseTrajectory.add( *bestCandidate, 0.0, time );
	}
	else
	{
		//NOTE: there is no measure (yet?) for deciding whether it would still make sense to extrapolate the face feature, so no extrapolation is done;
		// instead, trajectories are pruned so entries don't last forever. (Duplicating the last entry would actually make more sense for face feature regions.)
		m_noseTrajectory.prune();
	}

	if( mouthCandidates.size() )
	{
		double minAffinity = 0.0;
		const _2Real::Space2D *bestCandidate = NULL;

		if( !m_mouthTrajectory.canCalcAffinity() )
			bestCandidate = &( mouthCandidates.front() ); // not enough regions in the trajectory history to judge affinity, so just take the first candidate
		else
		{
			for( FeatureVector::const_iterator it = mouthCandidates.begin(); it != mouthCandidates.end(); ++it )
			{
				if( !bestCandidate )
				{
					bestCandidate = &( *it );
					minAffinity = m_mouthTrajectory.calcAffinity( *it, posAffWeight, sizeAffWeight, 1.0, time );
				}
				else
				{
					double affinity = m_mouthTrajectory.calcAffinity( *it, posAffWeight, sizeAffWeight, 1.0, time );
					if( affinity < minAffinity )
					{
						bestCandidate = &( *it );
						minAffinity = affinity;
					}
				}
			}
		}

		m_mouthTrajectory.add( *bestCandidate, 0.0, time );
	}
	else
	{
		//NOTE: there is no measure (yet?) for deciding whether it would still make sense to extrapolate the face feature, so no extrapolation is done;
		// instead, trajectories are pruned so entries don't last forever. (Duplicating the last entry would actually make more sense for face feature regions.)
		m_mouthTrajectory.prune();
	}
}
int main(int argc, char** argv )
{
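  // Load every image in the given folder, warp it to a bird's-eye view, run the
  // Haar feature filter on it, and display the detected features overlaid on the input.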
  if ( argc >= 2 )
  {
    std::cout << "ESC to quit, any other key to continue to next image." << std::endl;

    std::vector<cv::String> filenames;
    cv::String folder = cv::String(argv[1]);
    cv::glob(folder, filenames);
 
    std::string configFolder = argv[1];
    configFolder.append( "config" );
    std::string carName = "Goffin";
    if( argc >= 3 )
    {
      carName = argv[2];
    }
    
    std::string mConfigPath( Config::setConfigPath( configFolder, carName ) );
    std::string mBirdViewCalFile( ( boost::filesystem::path(configFolder) / carName / "BirdviewCal.yml").string() );

    CoordinateConverter converter( mConfigPath );
    HaarFilter filter( &converter );

    BirdViewConverter mBirdViewConverter;
    mBirdViewConverter.loadConfig( mBirdViewCalFile );

    cvflann::StartStopTimer timer;

    for(size_t i = 0; i < filenames.size(); ++i)
    {
      cv::Mat image = cv::imread( filenames[i], CV_LOAD_IMAGE_GRAYSCALE );
      std::cout << filenames[i] << std::endl;

      cv::Mat mBirdViewImageGray = mBirdViewConverter.transform( image );
      cv::Mat mBirdViewImage;
      cv::cvtColor(mBirdViewImageGray, mBirdViewImage, CV_GRAY2BGR);

      timer.start();

      filter.setImage( mBirdViewImageGray );

      filter.calculateFeaturesPerLine(
          true,
          true);

      FeatureVector* featureVec = filter.getFeatures();
      std::cout << featureVec->size() << std::endl;

      timer.stop();
      std::cout << "Time taken for image: " << timer.value << std::endl;

      //cv::Mat result = filter.generateDebugImage();

      cv::Mat channels[4];
      //cv::cvtColor( mLastBirdViewImage, output, CV_GRAY2BGR );

      cv::Mat features = filter.generateDebugImage();
      cv::split( features, channels );
      cv::Mat sum;
      cv::bitwise_or( mBirdViewImage, 0, sum, 255 - channels[3] );

      cv::Mat featuresRGB;
      cv::cvtColor( features, featuresRGB, CV_BGRA2BGR );
      sum = sum + featuresRGB;

      imshow( "input", mBirdViewImageGray );
      imshow( "result", sum );

      int code = cv::waitKey(0);
      if ( code == 27 )		// ESC to abort
        return 1;
    }
  }
  else
  {
    std::cout << "Usage: test_oadrive_haarfeatures <path> [CarName]" << std::endl;
    std::cout << "\tWhere <path> is a folder containing a test case and" << std::endl;
    std::cout << "\tCarName is an optional car name (default: Goffin)" << std::endl;
  }
  return 0;
}