Example #1
//-----------------------------------------------------------------------------
// The application object
//-----------------------------------------------------------------------------
void CSFMGenApp::GenerateSFMFiles( SFMGenInfo_t& info )
{
	// Build the relative path to the model's .mdl file and look it up in the MDL cache
	char pRelativeModelPath[MAX_PATH];
	Q_ComposeFileName( "models", info.m_pModelName, pRelativeModelPath, sizeof(pRelativeModelPath) );
	Q_SetExtension( pRelativeModelPath, ".mdl", sizeof(pRelativeModelPath) );
	MDLHandle_t hMDL = g_pMDLCache->FindMDL( pRelativeModelPath );
	if ( hMDL == MDLHANDLE_INVALID )
	{
		Warning( "sfmgen: Model %s doesn't exist!\n", pRelativeModelPath );
		return;
	}

	// Grab the studio header and make sure the model actually loaded
	studiohdr_t *pStudioHdr = g_pMDLCache->GetStudioHdr( hMDL );
	if ( !pStudioHdr || g_pMDLCache->IsErrorModel( hMDL ) )
	{
		Warning( "sfmgen: Model %s doesn't exist!\n", pRelativeModelPath );
		return;
	}

	// Read the CSV file that lists the SFM files to generate
	CUtlBuffer buf( 0, 0, CUtlBuffer::TEXT_BUFFER );
	if ( !g_pFullFileSystem->ReadFile( info.m_pCSVFile, NULL, buf ) )
	{
		Warning( "sfmgen: Unable to load file %s\n", info.m_pCSVFile );
		return;
	}

	// Parse the CSV into the list of entries to generate
	CUtlVector< SFMInfo_t > infoList;
	ParseCSVFile( buf, infoList, 1 );

	int nCount = infoList.Count();
	if ( nCount == 0 )
	{
		Warning( "sfmgen: no files to create!\n" );
		return;
	}

	UniqueifyNames( infoList );

	// Construct full path to the output directories
	char pFullPath[MAX_PATH];
	char pFullFacPathBuf[MAX_PATH];
	const char *pExportFacPath = NULL;
	ComputeFullPath( info.m_pOutputDirectory, pFullPath, sizeof(pFullPath) );
	if ( info.m_pExportFacDirectory )
	{
		ComputeFullPath( info.m_pExportFacDirectory, pFullFacPathBuf, sizeof(pFullFacPathBuf) );
		pExportFacPath = pFullFacPathBuf;
	}

	// Generate one SFM file for each entry in the CSV
	for ( int i = 0; i < nCount; ++i )
	{
		GenerateSFMFile( info, infoList[i], pStudioHdr, pFullPath, pExportFacPath );
	}
}
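
For reference, here is a hedged sketch of how this entry point might be driven. The full definition of SFMGenInfo_t is not shown above, so the member types (plain C strings) and every value below are assumptions based only on how the members are used inside GenerateSFMFiles.

//-----------------------------------------------------------------------------
// Hypothetical driver for GenerateSFMFiles(). Only the SFMGenInfo_t members
// referenced above are filled in; the model name, CSV path and directories
// are illustrative placeholders.
//-----------------------------------------------------------------------------
void Example_DriveSFMGeneration( CSFMGenApp &app )
{
	SFMGenInfo_t info;
	info.m_pModelName = "alyx";                    // resolved to models/alyx.mdl by GenerateSFMFiles
	info.m_pCSVFile = "scenes/session_list.csv";   // CSV listing the files to generate
	info.m_pOutputDirectory = "elements/sessions"; // where the generated files are written
	info.m_pExportFacDirectory = NULL;             // optional; may be left NULL
	app.GenerateSFMFiles( info );
}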

Example #2
void RunIrisSupervisedLearning()
{
	// Load the training/test dataset
	Table data = ParseCSVFile("data/iris.data");
	// Print out the data to ensure we've loaded it correctly
	std::cout << "Loaded Data:" << std::endl;
	PrintTable(data);


	// Extract feature vectors and class names from the loaded data
	std::vector<SampleType> allSamples = GetFeatureVectors(data);
	std::vector<std::string> classes = GetClasses(data);


	// Construct labels compatible with SVMs using the class data.
	// Each distinct class is mapped to an integer, and the integers are one
	// apart, so three classes will be assigned the labels 1, 2 and 3
	// respectively. (A sketch of such a helper follows this example.)
	std::vector<LabelType> allLabels = ConstructLabels(classes);


	// Randomise the samples and labels so that the original ordering of the
	// data (iris.data is grouped by class) does not bias the train/test split
	// or the cross-validation performed later
	randomize_samples(allSamples, allLabels);


	// Split the dataset in half: the first half is the training set and the
	// second half is the test set. This is done AFTER randomising so that one
	// half isn't entirely one class, which would produce a badly skewed classifier.
	unsigned int numTraining = static_cast<unsigned int>(allSamples.size() / 2);
	unsigned int numTest = static_cast<unsigned int>(allSamples.size()) - numTraining;

	std::vector<SampleType> trainingSamples;
	std::vector<LabelType> trainingLabels;
	trainingSamples.reserve(numTraining);
	trainingLabels.reserve(numTraining);
	std::vector<SampleType> testSamples;
	std::vector<LabelType> testLabels;
	testSamples.reserve(numTest);
	testLabels.reserve(numTest);

	for (unsigned int i = 0; i < numTraining; ++i)
	{
		trainingSamples.push_back(allSamples[i]);
		trainingLabels.push_back(allLabels[i]);
	}
	for (unsigned int i = numTraining; i < allSamples.size(); ++i)
	{
		testSamples.push_back(allSamples[i]);
		testLabels.push_back(allLabels[i]);
	}


	// Construct a kernel ridge regression trainer and give it the kernel built
	// from the best gamma found by FindBestGamma on the training data
	dlib::krr_trainer<KernelType> trainer;
	double bestGamma = FindBestGamma(trainer, trainingSamples, trainingLabels);
	trainer.set_kernel(KernelType(bestGamma));


	// Train the classifier on the training data, producing the learned decision function
	FunctionType learnedFunction;
	learnedFunction = trainer.train(trainingSamples, trainingLabels);


	// NOTE: This should just print out 1 for our training method
	std::cout << "The number of support vectors in our learned function is "
		<< learnedFunction.basis_vectors.nr() << std::endl;

	// Evaluate the learned function on the held-out test set
	double accuracy = CalculateAccuracy(learnedFunction, testSamples, testLabels);
	std::cout << "The accuracy of this classifier is: "
		<< (accuracy * 100) << "%." << std::endl;
}
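
The two helpers referenced above, ConstructLabels and CalculateAccuracy, are not part of this listing. Below is a hedged sketch of what they could look like, reusing the example's own typedefs (SampleType, LabelType, FunctionType): the label-assignment rule follows the comment in the code (distinct classes map to 1, 2, 3, ...), and the accuracy helper assumes the learned function's real-valued output is rounded to the nearest label. Treat both as illustrative assumptions, not the original implementations.

#include <cmath>
#include <cstddef>
#include <map>
#include <string>
#include <vector>

// Hypothetical helper: map each distinct class name to an integer label,
// assigning 1 to the first class seen, 2 to the second, and so on.
std::vector<LabelType> ConstructLabels(const std::vector<std::string>& classes)
{
	std::map<std::string, LabelType> classToLabel;
	std::vector<LabelType> labels;
	labels.reserve(classes.size());
	for (const std::string& className : classes)
	{
		auto it = classToLabel.find(className);
		if (it == classToLabel.end())
		{
			// First time this class appears: give it the next integer label
			LabelType newLabel = static_cast<LabelType>(classToLabel.size() + 1);
			it = classToLabel.emplace(className, newLabel).first;
		}
		labels.push_back(it->second);
	}
	return labels;
}

// Hypothetical helper: fraction of samples whose rounded prediction
// matches the ground-truth label.
double CalculateAccuracy(const FunctionType& learnedFunction,
                         const std::vector<SampleType>& samples,
                         const std::vector<LabelType>& labels)
{
	if (samples.empty())
		return 0.0;

	std::size_t numCorrect = 0;
	for (std::size_t i = 0; i < samples.size(); ++i)
	{
		// The learned function returns a real value; round it to the nearest
		// integer label before comparing against the expected label.
		if (std::lround(learnedFunction(samples[i])) == std::lround(static_cast<double>(labels[i])))
			++numCorrect;
	}
	return static_cast<double>(numCorrect) / static_cast<double>(samples.size());
}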