Example #1
void train (const ssi_char_t *dir, const ssi_char_t *model) {

	// load samples
	StringList files;
	FileTools::ReadFilesFromDir (files, dir, "*.wav");
	SampleList samples;	
	samples.addUserName ("user");

	for (ssi_size_t i = 0; i < files.size (); i++) {
		ssi_stream_t *stream = new ssi_stream_t;
		ssi_sample_t *sample = new ssi_sample_t;
		const ssi_char_t *filename = files.get (i);
	
		// parse class name
		FilePath fp (files.get(i));
		ssi_char_t *class_name = ssi_strcpy (fp.getName ());
		for (ssi_size_t j = 0; j < strlen (class_name); j++) {
			if (class_name[j] == '_') {
				class_name[j] = '\0';
				break;
			}
		}
		ssi_size_t class_id = samples.addClassName (class_name);
		delete[] class_name;

		// read wave file
		WavTools::ReadWavFile (filename, *stream);

		// create sample
		sample->class_id = class_id;
		sample->num = 1;
		sample->score = 1.0f;
		sample->streams = new ssi_stream_t *[1];
		sample->streams[0] = stream;
		sample->time = 0;
		sample->user_id = 0;				

		// add sample
		samples.addSample (sample);
	}

	// extract features
	SampleList samples_t;
	EmoVoiceFeat *ev_feat = ssi_create (EmoVoiceFeat, "ev_feat", true);
	ModelTools::TransformSampleList (samples, samples_t, *ev_feat);
	
	// create model
	IModel *bayes = ssi_create (NaiveBayes, "bayes", true);
	Trainer trainer (bayes);

	// evaluation
	Evaluation eval;
	eval.evalKFold (&trainer, samples_t, 10);
	eval.print ();

	// train & save
	trainer.train (samples_t);
	trainer.save (model);
}
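The function above only trains and saves the model. A minimal companion sketch for the test phase is given below; the helper name evaluate() and its signature are illustrative (not part of the original source), and it assumes the caller prepares the feature-transformed SampleList the same way as in train(), i.e. by reading the wav files and applying EmoVoiceFeat via ModelTools::TransformSampleList.

void evaluate (const ssi_char_t *model, SampleList &samples_t) {

	// load the previously saved trainer
	Trainer trainer;
	Trainer::Load (trainer, model);

	// evaluate on the prepared (feature-transformed) sample list
	Evaluation eval;
	eval.eval (&trainer, samples_t);
	eval.print ();
}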
Example #2
bool ex_fusion(void *arg) {

	ssi_tic ();

	ssi_size_t n_classes = 4;
	ssi_size_t n_samples = 50;
	ssi_size_t n_streams = 3;
	ssi_real_t train_distr[][3] = { 0.25f, 0.25f, 0.1f, 0.25f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f };
	ssi_real_t test_distr[][3] = { 0.5f, 0.5f, 0.5f };
	SampleList strain;
	SampleList sdevel;
	SampleList stest;
	ModelTools::CreateTestSamples (strain, n_classes, n_samples, n_streams, train_distr, "user");			
	ModelTools::CreateTestSamples (sdevel, n_classes, n_samples, n_streams, train_distr, "user");	
	ModelTools::CreateTestSamples (stest, 1, n_samples * n_classes, n_streams, test_distr, "user");	
	ssi_char_t string[SSI_MAX_CHAR];	
	for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
		ssi_sprint (string, "class%02d", n_class);
		stest.addClassName (string);
	}

	const ssi_char_t *name = "fusion";

	// training
	{
		IModel **models = new IModel *[n_streams];
		ssi_char_t string[SSI_MAX_CHAR];
		for (ssi_size_t n_stream = 0; n_stream < n_streams; n_stream++) {
			ssi_sprint (string, "%s.%02d", name, n_stream);
			models[n_stream] = ssi_create(SimpleKNN, string, true);
		}
		SimpleFusion *fusion = ssi_create (SimpleFusion, name, true);

		Trainer trainer (n_streams, models, fusion);
		trainer.train (strain);
		trainer.save ("fusion");

		delete[] models;
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load (trainer, "fusion");					
		Evaluation eval;
		eval.eval (&trainer, sdevel);
		eval.print ();
	}

	ssi_print_off("");
	ssi_toc_print ();
	ssi_print("\n");

	return true;
}
Example #3
bool ex_eval_regression(void *arg) {

	Trainer::SetLogLevel(SSI_LOG_LEVEL_DEBUG);

	ssi_size_t n_samples = 1000;

	SampleList strain;
	SampleList stest;
	ModelTools::CreateTestSamplesRegression(strain, n_samples, 0.1f);
	ModelTools::CreateTestSamplesRegression(stest, n_samples, 0.1f);

	LibSVM *model = ssi_create(LibSVM, 0, true);
	model->getOptions()->seed = 1234;
	model->getOptions()->silent = false;
	model->getOptions()->params.svm_type = LibSVM::TYPE::EPSILON_SVR;
	model->getOptions()->params.kernel_type = LibSVM::KERNEL::RADIAL;
	

	Trainer trainer(model);
	ISNorm::Params params;
	ISNorm::ZeroParams(params, ISNorm::METHOD::SCALE);
	params.limits[0] = 0.0f;
	params.limits[1] = 1.0f;
	trainer.setNormalization(&params);
	//ModelTools::PlotSamplesRegression(strain, "TRAINING", ssi_rect(640, 0, 400, 400));
	trainer.train(strain);
	
	Evaluation eval;
	eval.eval(&trainer, stest);

	ssi_real_t pcc = eval.get_metric(Evaluation::METRIC::PEARSON_CC);
	ssi_real_t mse = eval.get_metric(Evaluation::METRIC::MSE);
	ssi_real_t rmse = eval.get_metric(Evaluation::METRIC::RMSE);

	ssi_print("\n -------------------------------------");
	ssi_print("\n PCC: %.4f", pcc);
	ssi_print("\n MSE: %.4f", mse);
	ssi_print("\n RMSE: %.4f", rmse);
	ssi_print("\n -------------------------------------\n");


	FILE *fp = fopen("eval_regression.csv", "w");
	eval.print(fp, Evaluation::PRINT::CSV_EX);
	fclose(fp);

	//ModelTools::PlotSamplesRegression(stest, "TEST", ssi_rect(640, 0, 400, 400));

	return true;

}
Example #4
bool ex_model_norm(void *arg) {

	Trainer::SetLogLevel(SSI_LOG_LEVEL_DEBUG);

	ssi_size_t n_classes = 4;
	ssi_size_t n_samples = 50;
	ssi_size_t n_streams = 1;
	ssi_real_t train_distr[][3] = { 0.25f, 0.25f, 0.1f, 0.25f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f };
	ssi_real_t test_distr[][3] = { 0.5f, 0.5f, 0.5f };
	SampleList strain;
	SampleList sdevel;
	SampleList stest;
	ModelTools::CreateTestSamples(strain, n_classes, n_samples, n_streams, train_distr, "user");
	ModelTools::CreateTestSamples(sdevel, n_classes, n_samples, n_streams, train_distr, "user");
	ModelTools::CreateTestSamples(stest, 1, n_samples * n_classes, n_streams, test_distr, "user");
	ssi_char_t string[SSI_MAX_CHAR];
	for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
		ssi_sprint(string, "class%02d", n_class);
		stest.addClassName(string);
	}

	// train svm
	{
		SVM *model = ssi_create(SVM, 0, true);
		model->getOptions()->seed = 1234;		
		Trainer trainer(model);
		ISNorm::Params params;
		ISNorm::ZeroParams(params, ISNorm::METHOD::ZSCORE);
		trainer.setNormalization(&params);
		trainer.train(strain);
		trainer.save("svm+norm");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load(trainer, "svm+norm");
		Evaluation eval;
		eval.eval(&trainer, sdevel);
		eval.print();

		trainer.cluster(stest);
		ModelTools::PlotSamples(stest, "svm (external normalization)", ssi_rect(650,0,400,400));
	}

	return true;
}
Example #5
bool ex_model_frame(void *args)
{
	ssi_size_t n_classes = 4;
	ssi_size_t n_samples = 50;
	ssi_size_t n_streams = 1;
	ssi_real_t distr[][3] = { 0.25f, 0.25f, 0.1f, 0.25f, 0.75f, 0.1f, 0.75f, 0.25f, 0.1f, 0.75f, 0.75f, 0.1f };
	ssi_size_t num_min = 2;
	ssi_size_t num_max = 5;

	SampleList strain, sdevel;
	ModelTools::CreateDynamicTestSamples(strain, n_classes, n_samples, n_streams, distr, num_min, num_max, "user");
	ModelTools::PrintInfo(strain);
	ModelTools::CreateDynamicTestSamples(sdevel, n_classes, n_samples, n_streams, distr, num_min, num_max, "user");
	ModelTools::PrintInfo(sdevel);

	{
		FrameFusion *model = ssi_create(FrameFusion, 0, true);
		model->getOptions()->method = FrameFusion::METHOD::PRODUCT;
		model->getOptions()->n_context = 2;
		model->setModel(ssi_create(SVM, 0, true));
		Trainer trainer(model);
		trainer.train(strain);
		trainer.save("framefusion");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load(trainer, "framefusion");
		Evaluation eval;
		eval.eval(&trainer, sdevel);
		eval.print();
	}

	return true;
}
Example #6
bool FeatureFusion::train (ssi_size_t n_models,
	IModel **models,
	ISamples &samples) {

	if (samples.getSize () == 0) {
		ssi_wrn ("empty sample list");
		return false;
	}

	if (isTrained ()) {
		ssi_wrn ("already trained");
		return false;
	}

	_n_streams = samples.getStreamSize ();
	_n_classes = samples.getClassSize ();
	_n_models  = n_models;

	//initialize weights
	ssi_real_t **weights = new ssi_real_t*[n_models];
	for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
		weights[n_model] = new ssi_real_t[_n_classes+1];		
	}

	if (samples.hasMissingData ()) {

		_handle_md = true;

		ISMissingData samples_h (&samples);
		Evaluation eval;
		
		if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
			ssi_print("\nMissing data detected.\n");
		}
		
		//models[0] is featfuse_model, followed by singlechannel_models
		ISMergeDim ffusionSamples (&samples);
		ISMissingData ffusionSamples_h (&ffusionSamples);
		ffusionSamples_h.setStream(0);
		if (!models[0]->isTrained ()) { models[0]->train (ffusionSamples_h, 0); }

		if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
			eval.eval (*models[0], ffusionSamples_h, 0);
			eval.print();
		}
		//dummy weights for fused model
		for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
			weights[0][n_class] = 0.0f;
		}		
		weights[0][_n_classes] = 0.0f;	
		
		for (ssi_size_t n_model = 1; n_model < n_models; n_model++) {
			
			if (!models[n_model]->isTrained ()) {
				samples_h.setStream (n_model - 1);
				models[n_model]->train (samples_h, n_model - 1);
			}

			eval.eval (*models[n_model], samples_h, n_model - 1);

			if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
				eval.print();
			}

			for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
				weights[n_model][n_class] = eval.get_class_prob (n_class);
			}		
			weights[n_model][_n_classes] = eval.get_classwise_prob ();	
		}

		// determine fillers: rank the single-channel models by class-wise recognition
		// rate (weights[model][_n_classes]); _filler[i] stores the i-th best model,
		// which substitutes for a stream with missing data; the chosen model's weight
		// is zeroed so the next iteration picks the next best one
		_filler = new ssi_size_t[_n_streams];
		for (ssi_size_t n_fill = 0; n_fill < _n_streams; n_fill++) {
			_filler[n_fill] = 1;
			ssi_real_t filler_weight = weights[1][_n_classes];
			for (ssi_size_t n_model = 2; n_model < n_models; n_model++) {
				if (filler_weight < weights[n_model][_n_classes]) {
					_filler[n_fill] = n_model;
					filler_weight = weights[n_model][_n_classes];
				}
			}
			weights[_filler[n_fill]][_n_classes] = 0.0f;
		}
		if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
			ssi_print("\nfiller:\n");
			for (ssi_size_t n_model = 0; n_model < _n_streams; n_model++) {
				ssi_print("%d ", _filler[n_model]);
			}
			ssi_print("\n");
		}
	
	}
	else {

		_handle_md = false;

		if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
			ssi_print("\nNo missing data detected.\n");
		}
		ISMergeDim ffusionSamples (&samples);
		if (!models[0]->isTrained ()) { models[0]->train (ffusionSamples, 0); }
		// dummy fillers (unused when no data is missing)
		_filler = new ssi_size_t[_n_streams];
		for (ssi_size_t n_fill = 0; n_fill < _n_streams; n_fill++) {
			_filler[n_fill] = 0;
		}
	}

	if (weights) {
		for (ssi_size_t n_model = 0; n_model < _n_models; n_model++) {
			delete[] weights[n_model];
		}
		delete[] weights;
		weights = 0;
	}

	return true;
}
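For reference, a minimal usage sketch follows; it is not part of the original source and assumes that FeatureFusion can be handed to the multi-model Trainer constructor in the same way as SimpleFusion in Example #2, with the model array laid out as noted in the comment above (models[0] classifies the merged feature stream, followed by one model per single stream).

bool ex_feature_fusion (SampleList &strain, ssi_size_t n_streams) {

	// one model for the merged feature stream plus one per single stream
	ssi_size_t n_models = n_streams + 1;
	IModel **models = new IModel *[n_models];
	for (ssi_size_t n = 0; n < n_models; n++) {
		models[n] = ssi_create (SimpleKNN, 0, true);
	}
	FeatureFusion *fusion = ssi_create (FeatureFusion, 0, true);

	Trainer trainer (n_models, models, fusion);
	trainer.train (strain);
	trainer.save ("featfuse");

	delete[] models;
	return true;
}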
Example #7
bool ex_model(void *arg) {

	Trainer::SetLogLevel (SSI_LOG_LEVEL_DEBUG);

	ssi_size_t n_classes = 4;
	ssi_size_t n_samples = 50;
	ssi_size_t n_streams = 1;
	ssi_real_t train_distr[][3] = { 0.25f, 0.25f, 0.1f, 0.25f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f, 0.75f, 0.75f, 0.1f };
	ssi_real_t test_distr[][3] = { 0.5f, 0.5f, 0.5f };
	SampleList strain;
	SampleList sdevel;
	SampleList stest;
	ModelTools::CreateTestSamples (strain, n_classes, n_samples, n_streams, train_distr, "user");	
	ModelTools::CreateTestSamples (sdevel, n_classes, n_samples, n_streams, train_distr, "user");	
	ModelTools::CreateTestSamples (stest, 1, n_samples * n_classes, n_streams, test_distr, "user");	
	ssi_char_t string[SSI_MAX_CHAR];	
	for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
		ssi_sprint (string, "class%02d", n_class);
		stest.addClassName (string);
	}
	
	// train svm
	{
		SVM *model = ssi_create(SVM, 0, true);
		model->getOptions()->seed = 1234;
		Trainer trainer(model);
		trainer.train(strain);
		trainer.save("svm");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load(trainer, "svm");
		Evaluation eval;
		eval.eval(&trainer, sdevel);
		eval.print();

		trainer.cluster(stest);
		ModelTools::PlotSamples(stest, "svm (internal normalization)", ssi_rect(650, 0, 400, 400));
	}

	// train knn
	{
		KNearestNeighbors *model = ssi_create(KNearestNeighbors, 0, true);
		model->getOptions()->k = 5;
		//model->getOptions()->distsum = true;
		Trainer trainer (model);
		trainer.train (strain);
		trainer.save ("knn");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load (trainer, "knn");			
		Evaluation eval;
		eval.eval (&trainer, sdevel);
		eval.print ();

		trainer.cluster (stest);
		ModelTools::PlotSamples(stest, "knn", ssi_rect(650, 0, 400, 400));
	}

	// train naive bayes
	{
		NaiveBayes *model = ssi_create(NaiveBayes, 0, true);
		model->getOptions()->log = true;
		Trainer trainer (model);
		trainer.train (strain);
		trainer.save ("bayes");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load (trainer, "bayes");			
		Evaluation eval;
		eval.eval (&trainer, sdevel);
		eval.print ();

		trainer.cluster (stest);
		ModelTools::PlotSamples(stest, "bayes", ssi_rect(650, 0, 400, 400));
	}

	// train lda
	{
		LDA *model = ssi_create(LDA, "lda", true);
		Trainer trainer (model);
		trainer.train (strain);

		model->print();
		trainer.save ("lda");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load (trainer, "lda");
		Evaluation eval;
		eval.eval (&trainer, sdevel);
		eval.print ();

		trainer.cluster (stest);
		ModelTools::PlotSamples(stest, "lda", ssi_rect(650, 0, 400, 400));
	}

	ssi_print ("\n\n\tpress a key to contiue\n");
	getchar ();

	return true;
}
Example #8
bool ex_eval(void *arg) {

	ssi_size_t n_classes = 2;
	ssi_size_t n_samples = 20;
	ssi_size_t n_streams = 1;
	ssi_real_t train_distr[][3] = { 0.3f, 0.3f, 0.2f, 0.3f, 0.6f, 0.2f, 0.6f, 0.3f, 0.2f, 0.6f, 0.6f, 0.2f };
	SampleList samples;		
	ModelTools::CreateTestSamples (samples, n_classes, n_samples, n_streams, train_distr);	
	ssi_char_t string[SSI_MAX_CHAR];	
	for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
		ssi_sprint (string, "class%02d", n_class);
		samples.addClassName (string);
	}

	Evaluation eval;
	NaiveBayes *model = ssi_create (NaiveBayes, 0, true);
	Trainer trainer (model);
	trainer.train (samples);

	Evaluation2Latex e2latex;
	e2latex.open ("eval.tex");
	
	ssi_print_off ("devel set:\n");
	eval.eval (&trainer, samples);
	eval.print (ssiout);
	eval.print_result_vec ();

	e2latex.writeHead (eval, "caption", "label");
	e2latex.writeText ("results with different evaluation strategies", true);
	e2latex.writeEval ("devel", eval);
	
	ssi_print_off("k-fold:\n");
	eval.evalKFold (&trainer, samples, 3); 
	eval.print ();
	eval.print_result_vec ();

	e2latex.writeEval ("k-fold", eval);

	ssi_print_off("split:\n");
	eval.evalSplit (&trainer, samples, 0.5f); 
	eval.print ();
	eval.print_result_vec ();

	e2latex.writeEval ("split", eval);

	ssi_print_off("loo:\n");
	eval.evalLOO (&trainer, samples); 
	eval.print ();
	eval.print_result_vec ();

	e2latex.writeEval ("loo", eval);
	
	e2latex.writeTail ();
	e2latex.close ();

	FILE *fp = fopen("eval.csv", "w");
	eval.print(fp, Evaluation::PRINT::CSV_EX);
	fclose(fp);

	return true;
}