Example #1
int
main(void)
{
	DataReader reader;
	std::vector<fv_t> data;
	std::vector<fv_t> test_data;
	std::vector<label_t> labels;
	std::vector<label_t> test_labels;
	category_index_t category_index;
	NearestCentroidClassifier centroid_classifier;
	TFIDFTransformer tfidf;
	long t = tick();
	long t_all = tick();
	Evaluation evaluation;
	
	if (!reader.open(TRAIN_DATA)) {
		fprintf(stderr, "can't read file\n");
		return -1;
	}
	reader.read(data, labels);
	printf("read %ld, %ld, %ldms\n", data.size(), labels.size(), tick() - t);
	reader.close();
	
	t = tick();
	srand(VT_SEED);
	build_category_index(category_index, data, labels);
	split_data(test_data, test_labels, data, labels, category_index, 0.05f);
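	// rebuild the index: split_data moved the test samples out of data/labels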
	build_category_index(category_index, data, labels);
	printf("split train:%ld, test:%ld\n", data.size(), test_data.size());
	
	t = tick();
	tfidf.train(data);
	tfidf.transform(data);
	tfidf.transform(test_data);
	centroid_classifier.train(category_index, data);
	printf("build index %ldms\n", tick() -t );
	
	t = tick();
#ifdef _OPENMP
#pragma omp parallel for schedule(dynamic, 1)	
#endif
	for (int i = 0; i < (int)test_data.size(); ++i) {
		std::vector<int> topn_labels;
		centroid_classifier.predict(topn_labels, K, test_data[i]);
#ifdef _OPENMP
#pragma omp critical
#endif
		{
			evaluation.update(topn_labels, test_labels[i]);
			if (i % 1000 == 0) {
				print_evaluation(evaluation, i, t);
				t = tick();
			}
		}
	}
	printf("----\n");
	print_evaluation(evaluation, (int)test_data.size(), t_all);
	
	return 0;
}
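
Example #1 assumes a millisecond timer tick() that the snippet does not define.
A minimal sketch of one plausible definition (an assumption, not the original
helper):

#include <chrono>

static long
tick(void)
{
	// milliseconds since an arbitrary fixed epoch; callers only use differences
	return (long)std::chrono::duration_cast<std::chrono::milliseconds>(
		std::chrono::steady_clock::now().time_since_epoch()).count();
}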
Example #2
void train (const ssi_char_t *dir, const ssi_char_t *model) {

	// load samples
	StringList files;
	FileTools::ReadFilesFromDir (files, dir, "*.wav");
	SampleList samples;	
	samples.addUserName ("user");

	for (ssi_size_t i = 0; i < files.size (); i++) {
		ssi_stream_t *stream = new ssi_stream_t;
		ssi_sample_t *sample = new ssi_sample_t;
		const ssi_char_t *filename = files.get (i);
	
		// parse class name
		FilePath fp (files.get(i));
		ssi_char_t *class_name = ssi_strcpy (fp.getName ());
		for (ssi_size_t j = 0; j < strlen (class_name); j++) {
			if (class_name[j] == '_') {
				class_name[j] = '\0';
				break;
			}
		}
		ssi_size_t class_id = samples.addClassName (class_name);
		delete[] class_name;

		// read wave file
		WavTools::ReadWavFile (filename, *stream);

		// create sample
		sample->class_id = class_id;
		sample->num = 1;
		sample->score = 1.0f;
		sample->streams = new ssi_stream_t *[1];
		sample->streams[0] = stream;
		sample->time = 0;
		sample->user_id = 0;				

		// add sample
		samples.addSample (sample);
	}

	// extract features
	SampleList samples_t;
	EmoVoiceFeat *ev_feat = ssi_create (EmoVoiceFeat, "ev_feat", true);
	ModelTools::TransformSampleList (samples, samples_t, *ev_feat);
	
	// create model
	IModel *bayes = ssi_create (NaiveBayes, "bayes", true);
	Trainer trainer (bayes);

	// evaluation
	Evaluation eval;
	eval.evalKFold (&trainer, samples_t, 10);
	eval.print ();

	// train & save
	trainer.train (samples_t);
	trainer.save (model);
}
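
A hypothetical entry point for Example #2, assuming wave files are named
"<class>_<rest>.wav" as the parsing loop above expects:

#include <cstdio>

int main(int argc, char **argv) {
	if (argc < 3) {
		printf("usage: %s <wav-dir> <model-file>\n", argv[0]);
		return -1;
	}
	train(argv[1], argv[2]);
	return 0;
}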
Example #3
bool ex_fusion(void *arg) {

	ssi_tic ();

	ssi_size_t n_classes = 4;
	ssi_size_t n_samples = 50;
	ssi_size_t n_streams = 3;
	ssi_real_t train_distr[][3] = { { 0.25f, 0.25f, 0.1f }, { 0.25f, 0.75f, 0.1f }, { 0.75f, 0.75f, 0.1f }, { 0.75f, 0.75f, 0.1f } };
	ssi_real_t test_distr[][3] = { { 0.5f, 0.5f, 0.5f } };
	SampleList strain;
	SampleList sdevel;
	SampleList stest;
	ModelTools::CreateTestSamples (strain, n_classes, n_samples, n_streams, train_distr, "user");			
	ModelTools::CreateTestSamples (sdevel, n_classes, n_samples, n_streams, train_distr, "user");	
	ModelTools::CreateTestSamples (stest, 1, n_samples * n_classes, n_streams, test_distr, "user");	
	ssi_char_t string[SSI_MAX_CHAR];	
	for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
		ssi_sprint (string, "class%02d", n_class);
		stest.addClassName (string);
	}

	const ssi_char_t *name = "fusion";

	// strain
	{
		IModel **models = new IModel *[n_streams];
		ssi_char_t string[SSI_MAX_CHAR];
		for (ssi_size_t n_stream = 0; n_stream < n_streams; n_stream++) {
			ssi_sprint (string, "%s.%02d", name, n_stream);
			models[n_stream] = ssi_create(SimpleKNN, string, true);
		}
		SimpleFusion *fusion = ssi_create (SimpleFusion, name, true);

		Trainer trainer (n_streams, models, fusion);
		trainer.train (strain);
		trainer.save ("fusion");

		delete[] models;
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load (trainer, "fusion");					
		Evaluation eval;
		eval.eval (&trainer, sdevel);
		eval.print ();
	}

	ssi_print_off("");
	ssi_toc_print ();
	ssi_print("\n");

	return true;
}
Example #4
inline bool Type_::TakeOperand(
	Evaluation & theEvaluation,
	TheOperand & theOperand
) {
	assert(
		!theOperand.IsEmpty()
	);
	assert(this->thisOperandCount < 3);
	switch (this->thisOperandCount++) {
	case 0:
		this->thisEmptyCase.Take(theOperand);
		return false;
	case 1:
		this->thisNonEmptyCase.Take(theOperand);
		return false;
	default:
		{
			TheOperand const & theConstOperand = theOperand;
			theEvaluation.TakeOperand(
				theConstOperand.GetProgram()->IsEmpty() ?
				this->thisEmptyCase :
				this->thisNonEmptyCase
			);
		}
		return true;
	}
}
Example #5
bool ex_model_norm(void *arg) {

	Trainer::SetLogLevel(SSI_LOG_LEVEL_DEBUG);

	ssi_size_t n_classes = 4;
	ssi_size_t n_samples = 50;
	ssi_size_t n_streams = 1;
	ssi_real_t train_distr[][3] = { { 0.25f, 0.25f, 0.1f }, { 0.25f, 0.75f, 0.1f }, { 0.75f, 0.75f, 0.1f }, { 0.75f, 0.75f, 0.1f } };
	ssi_real_t test_distr[][3] = { { 0.5f, 0.5f, 0.5f } };
	SampleList strain;
	SampleList sdevel;
	SampleList stest;
	ModelTools::CreateTestSamples(strain, n_classes, n_samples, n_streams, train_distr, "user");
	ModelTools::CreateTestSamples(sdevel, n_classes, n_samples, n_streams, train_distr, "user");
	ModelTools::CreateTestSamples(stest, 1, n_samples * n_classes, n_streams, test_distr, "user");
	ssi_char_t string[SSI_MAX_CHAR];
	for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
		ssi_sprint(string, "class%02d", n_class);
		stest.addClassName(string);
	}

	// train svm
	{
		SVM *model = ssi_create(SVM, 0, true);
		model->getOptions()->seed = 1234;		
		Trainer trainer(model);
		ISNorm::Params params;
		ISNorm::ZeroParams(params, ISNorm::METHOD::ZSCORE);
		trainer.setNormalization(&params);
		trainer.train(strain);
		trainer.save("svm+norm");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load(trainer, "svm+norm");
		Evaluation eval;
		eval.eval(&trainer, sdevel);
		eval.print();

		trainer.cluster(stest);
		ModelTools::PlotSamples(stest, "svm (external normalization)", ssi_rect(650,0,400,400));
	}

	return true;
}
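
For reference, the ZSCORE method selected above standardizes each feature
dimension before training; a sketch of the per-value transform (the actual
ISNorm implementation lives inside the library):

static ssi_real_t zscore(ssi_real_t x, ssi_real_t mean, ssi_real_t stddev) {
	// guard against constant dimensions
	return stddev != 0.0f ? (x - mean) / stddev : 0.0f;
}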
Example #6
inline void Type_::Give(Evaluation & theEvaluation) {
	theEvaluation.TakeOperation(
		std::auto_ptr<IncompleteOperation>(
			new PullOperation<
				Literal,
				BackPullOperandOperation
			>
		)
	);
}
Example #7
inline void Type_::Give(Evaluation & theEvaluation) {
	theEvaluation.TakeOperation(
		std::auto_ptr<IncompleteOperation>(
			new PullOperation<
				Operator,
				FrontPullCodePointOperation
			>
		)
	);
}
Example #8
inline void Type_::Give(Evaluation & theEvaluation) {
	theEvaluation.TakeOperation(
		std::auto_ptr<IncompleteOperation>(
			new PullOperation<
				Expression,
				BackPullFormOperation
			>
		)
	);
}
Example #9
inline void Type_::Give(Evaluation & theEvaluation) {
	theEvaluation.TakeOperation(
		std::auto_ptr<IncompleteOperation>(
			new PullOperation<
				Operator,
				BackPullCharacterOperation
			>
		)
	);
}
Example #10
static void
print_evaluation(const Evaluation &evaluation, int i, long t)
{
	double maf, map, mar, top1_acc;
	evaluation.score(maf, map, mar, top1_acc);
	
	printf("--- %d MaF: %f, MaP:%f, MaR:%f, Top1ACC: %f %ldms\n",
		   i,
		   maf, map, mar, top1_acc,
		   tick() -t);
}
Example #11
bool Rank::train (ISamples &samples,
	ssi_size_t stream_index) {

	if (!_model) {
		ssi_wrn ("a model has not been set yet");
		return false;
	}

	release ();

	_n_scores = samples.getStream (stream_index).dim;	
	_scores = new score[_n_scores];

	Evaluation eval;
	Trainer trainer (_model, stream_index);
	
	SSI_DBG (SSI_LOG_LEVEL_DEBUG, "evaluate dimensions:");
	for (ssi_size_t ndim = 0; ndim < _n_scores; ndim++) {
		ISSelectDim samples_s (&samples);
		samples_s.setSelection (stream_index, 1, &ndim);
		if (_options.loo) {
			eval.evalLOO (&trainer, samples_s);
		} else if (_options.louo) {
			eval.evalLOUO (&trainer, samples_s);
		} else {
			eval.evalKFold (&trainer, samples_s, _options.kfold);
		}
		_scores[ndim].index = ndim;
		_scores[ndim].value = eval.get_classwise_prob ();
		SSI_DBG (SSI_LOG_LEVEL_DEBUG, "  #%02u -> %.2f", _scores[ndim].index, _scores[ndim].value);
	} 

	trainer.release ();

	return true;
}
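
Rank::train fills _scores but leaves them in dimension order. A sketch of the
descending sort one would apply to obtain the actual ranking, assuming `score`
pairs an index with a value (the real struct is defined by the Rank class):

#include <algorithm>

struct score { ssi_size_t index; ssi_real_t value; };

static void sort_scores_desc(score *scores, ssi_size_t n) {
	std::sort(scores, scores + n,
		[](const score &a, const score &b) { return a.value > b.value; });
}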
Example #12
bool ex_model_frame(void *args)
{
	ssi_size_t n_classes = 4;
	ssi_size_t n_samples = 50;
	ssi_size_t n_streams = 1;
	ssi_real_t distr[][3] = { { 0.25f, 0.25f, 0.1f }, { 0.25f, 0.75f, 0.1f }, { 0.75f, 0.25f, 0.1f }, { 0.75f, 0.75f, 0.1f } };
	ssi_size_t num_min = 2;
	ssi_size_t num_max = 5;

	SampleList strain, sdevel;
	ModelTools::CreateDynamicTestSamples(strain, n_classes, n_samples, n_streams, distr, num_min, num_max, "user");
	ModelTools::PrintInfo(strain);
	ModelTools::CreateDynamicTestSamples(sdevel, n_classes, n_samples, n_streams, distr, num_min, num_max, "user");
	ModelTools::PrintInfo(sdevel);

	{
		FrameFusion *model = ssi_create(FrameFusion, 0, true);
		model->getOptions()->method = FrameFusion::METHOD::PRODUCT;
		model->getOptions()->n_context = 2;
		model->setModel(ssi_create(SVM, 0, true));
		Trainer trainer(model);
		trainer.train(strain);
		trainer.save("framefusion");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load(trainer, "framefusion");
		Evaluation eval;
		eval.eval(&trainer, sdevel);
		eval.print();
	}

	return true;
}
Example #13
bool ex_eval_regression(void *arg) {

	Trainer::SetLogLevel(SSI_LOG_LEVEL_DEBUG);

	ssi_size_t n_samples = 1000;

	SampleList strain;
	SampleList stest;
	ModelTools::CreateTestSamplesRegression(strain, n_samples, 0.1f);
	ModelTools::CreateTestSamplesRegression(stest, n_samples, 0.1f);

	LibSVM *model = ssi_create(LibSVM, 0, true);
	model->getOptions()->seed = 1234;
	model->getOptions()->silent = false;
	model->getOptions()->params.svm_type = LibSVM::TYPE::EPSILON_SVR;
	model->getOptions()->params.kernel_type = LibSVM::KERNEL::RADIAL;

	Trainer trainer(model);
	ISNorm::Params params;
	ISNorm::ZeroParams(params, ISNorm::METHOD::SCALE);
	params.limits[0] = 0.0f;
	params.limits[1] = 1.0f;
	trainer.setNormalization(&params);
	//ModelTools::PlotSamplesRegression(strain, "TRAINING", ssi_rect(640, 0, 400, 400));
	trainer.train(strain);
	
	Evaluation eval;
	eval.eval(&trainer, stest);

	ssi_real_t pcc = eval.get_metric(Evaluation::METRIC::PEARSON_CC);
	ssi_real_t mse = eval.get_metric(Evaluation::METRIC::MSE);
	ssi_real_t rmse = eval.get_metric(Evaluation::METRIC::RMSE);

	ssi_print("\n -------------------------------------");
	ssi_print("\n PCC: %.4f", pcc);
	ssi_print("\n MSE: %.4f", mse);
	ssi_print("\n RMSE: %.4f", rmse);
	ssi_print("\n -------------------------------------\n");


	FILE *fp = fopen("eval_regression.csv", "w");
	eval.print(fp, Evaluation::PRINT::CSV_EX);
	fclose(fp);

	//ModelTools::PlotSamplesRegression(stest, "TEST", ssi_rect(640, 0, 400, 400));

	return true;
}
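
As a sanity check (an addition, not part of the original example), RMSE should
equal the square root of MSE when both metrics come from the same predictions;
with <cassert> and <cmath> included, one could add after the get_metric() calls:

	assert(fabs(rmse - sqrt(mse)) < 1e-4);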
Example #14
inline bool Type_::TakeQuotedProducer(
	Evaluation & theEvaluation,
	TheProducer & theProducer
) {
	if (
		this->thisExpression.IsEmpty()
	) {
		this->thisExpression.TakeElements(theProducer);
		if (
			this->thisExpression.IsEmpty()
		) {
			QuoteOperation::Give(theEvaluation);
			return true;
		}
		return false;
	}
	this->thisExpression.TakeQuotedProducer(theProducer);
	theEvaluation.TakeQuotedProducer(this->thisExpression);
	return true;
}
Example #15
inline bool Type_::TakeQuotedProducer(
	Evaluation & theEvaluation,
	TheProducer & theProducer
) {
	if (
		this->thisProgram.IsEmpty()
	) {
		this->thisProgram.TakeElements(theProducer);
		if (
			this->thisProgram.IsEmpty()
		) {
			ProgramOperation<ThisProgram>::Give(theEvaluation);
			return true;
		}
		return false;
	}
	this->thisProgram.TakeElements(theProducer);
	theEvaluation.TakeQuotedProducer(this->thisProgram);
	return true;
}
Example #16
inline bool Type_::TakeOperand(
	Evaluation & theEvaluation,
	TheOperand & theOperand
) {
	assert(
		!theOperand.IsEmpty()
	);
	if (
		this->thisExpression.IsEmpty()
	) {
		this->thisExpression.TakeElements(*theOperand);
		if (
			this->thisExpression.IsEmpty()
		) {
			QuoteOperation::Give(theEvaluation);
			return true;
		}
		return false;
	}
	this->thisExpression.TakeOperand(theOperand);
	theEvaluation.TakeQuotedProducer(this->thisExpression);
	return true;
}
Example #17
int _tmain(int argc, _TCHAR* argv[])
{
	clock_t start,end;
	start=clock();
//************************************************************
	string readpathname;
	if(argc<=1){
		string filepath="F:\\xudayong\\Evaluation\\ApplicationPrograms\\";
		readpathname=filepath+EVALUATION_COMPUTE_INFO_FILE_NAME;
	}
	}
	else if(argc==2){
		char tempstring[1000];
		int i=0;
		while(argv[1][i]!='\0'&&i<999){
			tempstring[i]=(char)argv[1][i];
			i++;
		}
		tempstring[i]='\0';
		readpathname=(std::string)tempstring;
	}
//************************************************************	
	Evaluation tempeva;
	if(!tempeva.Readinfo(readpathname)){
		// failed to read the configuration info
		return 0;
	}
//	tempeva.GetCellsInRegion(); // collect the antenna info needed for the computation
	//int cellsize=(int)tempeva.CellsInRegion.size(); // number of antennas
	string resultpath1=tempeva.result_path_name+(string)"\\EVA_RESULT"; // results folder
	if(_access(resultpath1.c_str(),0)==0) // if the folder already exists, do nothing; otherwise create one
	{
		//cout<<"directory exists\n";
	}
	else{
		string delresultpath=(string)"rmdir "+resultpath1+(string)" /q/s";
		if(_mkdir(resultpath1.c_str())==-1){
			system(delresultpath.c_str());
			_mkdir(resultpath1.c_str());
		}
	}
	string result_file_name=resultpath1+(string)"\\cell_eva_compute_"+tempeva.networksort+(string)"_"+tempeva.time;
	string result_file_name1=result_file_name+(string)".csv";
	string result_gridfile_name=resultpath1+(string)"\\grid_eva_compute_"+tempeva.networksort+(string)"_"+tempeva.time;
	string result_gridfile_name1=result_gridfile_name+(string)".csv";
	int i=0;
	char num[64];
	while(_access(result_file_name1.c_str(),0)==0){ // if the result file already exists, bump the trailing index so earlier results are preserved
		sprintf_s(num,"%d",i);
		string result_file_name_new=result_file_name+(string)"_"+(string)num+".csv";
		string result_gridfile_name_new=result_gridfile_name+(string)"_"+(string)num+".csv";
		if(_access(result_file_name_new.c_str(),0)!=0||_access(result_gridfile_name_new.c_str(),0)!=0){
			result_file_name1=result_file_name_new;
			result_gridfile_name1=result_gridfile_name_new;
			break;
		}
		i++;
	}
/************************************************************
	vector<Evaluation> tempcellcompute;
	tempcellcompute.clear();
	tempcellcompute.resize(cellsize);
#pragma  omp parallel for
	for(int i=0;i<cellsize;i++){
	cout<<"************************************************************** i:"<<i<<endl;
		tempcellcompute[i].Readinfo(tempreadinfo);
		tempcellcompute[i].Getcellinfo(tempreadinfo.CellsInRegion[i]);
		tempcellcompute[i].Getcell_coverinfo();
		tempcellcompute[i].Getgrid_info();
		tempcellcompute[i].Grid_Compute();
		tempcellcompute[i].Get_Crossever();
		tempcellcompute[i].Get_Bias();
	}
*/
	if(tempeva.Eval()){
		
		bool r=tempeva.GetRegionKPI();
		if(r==true){
			cout<<"Writing Region Evluation Result File "<<result_file_name1<<endl;
			tempeva.WriteRegionResult(result_file_name1);
			//tempeva.WriteCellsResult(result_file_name1);
			tempeva.WriteGridsInfo(result_gridfile_name1);
			end=clock();
			cout<<"evaluation completed in total "<<(double)(end-start)/CLOCKS_PER_SEC<<"s.\n";
			return 0;
		}
		else{
			cout<<"Evaluation of region did not complete due to errors"<<endl;
		}
	}
	else{
		cout<<"evaluation process error\n";
	}
	return 0;
}
Example #18
TEST(evaluationtest, testEvaluate) {
  Position position(Notation::toPosition(Notation::STANDARDPOSITION));
  Evaluation evaluation;

  EXPECT_EQ(+Evaluation::TEMPO, evaluation.evaluate(position));
}
Example #19
int main(int argc, char ** argv) {
	Aargh args(argc,argv);

	if(args.GetArgCount()==0||!args.HasArg("t")||!args.HasArg("m")) {
		cout << "Usage:" << endl
			<< "\t" << argv[0] << " -t testfile -m modelfile" << endl
			<< endl
			<< "Optional parameters:" << endl
			<< "\t-h <number of hypothesis to use from model> (default: all)" << endl
			<< "\t-oe <name of output evaluation file> (default: testfile.evaluation)" << endl
			<< "\t-op <name of output prediction file> (default: testfile.prediction)" << endl
			<< "\t-v <verbosity> [0-2] (default:1)" << endl
			<< "\t-nE (do NOT save evaluations, default: save)"<< endl
			<< "\t-nP (do NOT save predictions, default: save)"<< endl;

		return ERROR_COMMANDLINE;
	}

	int verbosity = args.GetArg<int>("v",1);

	BoostingModel * model = NULL;
	Evaluation * evaluation = NULL;
	PredictionSet * predictionSet = NULL;

	if(verbosity>0)
		cout << "Loading data" << endl;

	string modelname = args.GetArg<string>("m","nofile.model");
	if(verbosity>1)
		cout << "Loading model file: " << modelname << endl;
	ifstream modelfile(modelname.c_str());
	if(args.HasArg("h")) {
	  int hyp = args.GetArg<int>("h",100);
	  if(verbosity>0)
	    cout << "Using the first " << hyp << " hypothesis" << endl;
	  model = BoostingModel::Read(modelfile,hyp);
	}
	else {
	  model = BoostingModel::Read(modelfile);
	  if(verbosity>0)
	    cout << "Using all the hypothesis in the model (" << model->GetHypothesisCount() << ")" << endl;
	}
	modelfile.close();
	if(verbosity>1)
	  cout << "Model file loaded (" << model->GetCompletedIterations() << " iterations, " <<
	    model->GetCategoryCount() << " categories)" << endl;
	
	if(args.HasArg("nP")) 
	  predictionSet = NULL;
	else
	  predictionSet = new PredictionSet(model->GetCategoryCount());
	
	if(args.HasArg("nE")) 
	  evaluation = NULL;
	else
	  evaluation = new Evaluation(model->GetCategoryCount());
	
	if(verbosity>0)
	  cout << "Data loaded" << endl;
	
	string testname = "nofile";
	
	testname = args.GetArg<string>("t","nofile");
	if(verbosity>1)
	  cout << "Opening test file: " << testname << endl;
	
	ifstream testfile(testname.c_str());

	if(verbosity>0)
	  cout << "Starting test" << endl;

	clock_t start = clock();
	int retval = model->RunTest(testfile,evaluation,predictionSet,cout,verbosity);
	clock_t end = clock();

	testfile.close();

	if(verbosity>0)
		cout << "Test completed in " << (end-start)/(double)CLOCKS_PER_SEC << " seconds." << endl;


	if(!args.HasArg("nE")) {
		string outevaluationname = args.GetArg<string>("oe",testname+".evaluation");
		if(verbosity>0)
			cout << "Serializing evaluation to file: " << outevaluationname << endl;
		ofstream outevaluation(outevaluationname.c_str());
		evaluation->Write(outevaluation);
		outevaluation.close();
		if(verbosity>0)
			cout << "Serialization completed" << endl;
		delete evaluation;
	}

	if(!args.HasArg("nP")) {
		string outpredictionname = args.GetArg<string>("op",testname+".prediction");
		if(verbosity>0)
			cout << "Serializing prediction to file: " << outpredictionname << endl;
		ofstream outprediction(outpredictionname.c_str());
		predictionSet->Write(outprediction);
		outprediction.close();
		if(verbosity>0)
			cout << "Serialization completed" << endl;
		delete predictionSet;
	}

	delete model;

	return retval;
}
Example #20
void ResultEvaluation::playAnimation(){
    
    Evaluation* e = Evaluation::create();
    addChild( e );
	
    Sequence* animation = nullptr;
	StageNumber stageNumber;
	ResultData resultData;
	int stageNum = stageNumber.loadStageNumber();
	int ColorR = resultData.readColorR( stageNum );
    int ColorG = resultData.readColorG( stageNum );
    int ColorB = resultData.readColorB( stageNum );
    
    if ( e->getTag() == EvaluationTag::BAD ){
        
        animation = Sequence::create( DelayTime::create( 0.5f ), CallFunc::create( [ = ] (){
            mIsAnimation = true;
            Sprite* sprite = Sprite::create("Texture/GameResult/" + EvaluationPath.at( EvaluationTag::BAD ) + ".png" );
            sprite->setColor( Color3B( ColorR, ColorG, ColorB ) );
            //sprite->setPosition( 360, 930 );
			ADX2Player::getInstance().play( 9 );
			sprite->setAnchorPoint( Vec2::ANCHOR_MIDDLE );
            this->addChild(sprite);
        }), nullptr);

    }
    
    if ( e->getTag() == EvaluationTag::GOOD ){
        
        animation = Sequence::create( DelayTime::create( 0.5f ), CallFunc::create( [ = ] (){
            mIsAnimation = true;
            Sprite* sprite = Sprite::create("Texture/GameResult/" + EvaluationPath.at( EvaluationTag::GOOD ) + ".png" );
            sprite->setColor( Color3B( ColorR, ColorG, ColorB ) );
            //sprite->setPosition( 360, 930 );
			ADX2Player::getInstance().play( 9 );
			sprite->setAnchorPoint( Vec2::ANCHOR_MIDDLE );
            this->addChild(sprite);
        }), nullptr);

    }
    
    if ( e->getTag() == EvaluationTag::EXCELLENT ){
    
        animation = Sequence::create( DelayTime::create( 0.5f ), CallFunc::create( [ = ] (){
            mIsAnimation = true;
            Sprite* sprite = Sprite::create("Texture/GameResult/" + EvaluationPath.at( EvaluationTag::EXCELLENT ) + ".png" );
            sprite->setColor( Color3B( ColorR, ColorG, ColorB ) );
            //sprite->setPosition( 360, 930 );
			ADX2Player::getInstance().play( 9 );
			sprite->setAnchorPoint( Vec2::ANCHOR_MIDDLE );
            this->addChild(sprite);
        }), nullptr);

    }
    
    if ( e->getTag() == EvaluationTag::PERFECT ){
    
        animation = Sequence::create( DelayTime::create( 0.5f ), CallFunc::create( [ = ] (){
            mIsAnimation = true;
            Sprite* sprite = Sprite::create("Texture/GameResult/" + EvaluationPath.at( EvaluationTag::PERFECT ) + ".png" );
            sprite->setColor( Color3B( ColorR, ColorG, ColorB ) );
            //sprite->setPosition( 360, 930 );
			ADX2Player::getInstance().play( 9 );
			sprite->setAnchorPoint( Vec2::ANCHOR_MIDDLE );
            this->addChild(sprite);
        }), nullptr);

    }
    
    // guard against an unhandled tag leaving animation null
    if ( animation != nullptr ){
        animation->setTag( 0 );
        this->runAction( animation );
    }
}
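
The four branches of Example #20 differ only in the tag used to pick the
texture; a sketch of a table-driven variant, assuming EvaluationPath contains
an entry for every tag the Evaluation node can carry:

    const auto tag = static_cast<EvaluationTag>( e->getTag() );
    animation = Sequence::create( DelayTime::create( 0.5f ), CallFunc::create( [ = ] (){
        mIsAnimation = true;
        Sprite* sprite = Sprite::create( "Texture/GameResult/" + EvaluationPath.at( tag ) + ".png" );
        sprite->setColor( Color3B( ColorR, ColorG, ColorB ) );
        ADX2Player::getInstance().play( 9 );
        sprite->setAnchorPoint( Vec2::ANCHOR_MIDDLE );
        this->addChild( sprite );
    }), nullptr );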
Example #21
bool ex_eval(void *arg) {

	ssi_size_t n_classes = 2;
	ssi_size_t n_samples = 20;
	ssi_size_t n_streams = 1;
	ssi_real_t train_distr[][3] = { { 0.3f, 0.3f, 0.2f }, { 0.3f, 0.6f, 0.2f }, { 0.6f, 0.3f, 0.2f }, { 0.6f, 0.6f, 0.2f } };
	ssi_real_t test_distr[][3] = { { 0.5f, 0.5f, 0.5f } };
	SampleList samples;		
	ModelTools::CreateTestSamples (samples, n_classes, n_samples, n_streams, train_distr);	
	ssi_char_t string[SSI_MAX_CHAR];	
	for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
		ssi_sprint (string, "class%02d", n_class);
		samples.addClassName (string);
	}

	Evaluation eval;
	NaiveBayes *model = ssi_create (NaiveBayes, 0, true);
	Trainer trainer (model);
	trainer.train (samples);

	Evaluation2Latex e2latex;
	e2latex.open ("eval.tex");
	
	ssi_print_off ("devel set:\n");
	eval.eval (&trainer, samples);
	eval.print (ssiout);
	eval.print_result_vec ();

	e2latex.writeHead (eval, "caption", "label");
	e2latex.writeText ("results with different evaluation strategies", true);
	e2latex.writeEval ("devel", eval);
	
	ssi_print_off("k-fold:\n");
	eval.evalKFold (&trainer, samples, 3); 
	eval.print ();
	eval.print_result_vec ();

	e2latex.writeEval ("k-fold", eval);

	ssi_print_off("split:\n");
	eval.evalSplit (&trainer, samples, 0.5f); 
	eval.print ();
	eval.print_result_vec ();

	e2latex.writeEval ("split", eval);

	ssi_print_off("loo:\n");
	eval.evalLOO (&trainer, samples); 
	eval.print ();
	eval.print_result_vec ();

	e2latex.writeEval ("loo", eval);
	
	e2latex.writeTail ();
	e2latex.close ();

	FILE *fp = fopen("eval.csv", "w");
	eval.print(fp, Evaluation::PRINT::CSV_EX);
	fclose(fp);

	return true;
}
Example #22
bool FeatureFusion::train (ssi_size_t n_models,
	IModel **models,
	ISamples &samples) {

	if (samples.getSize () == 0) {
		ssi_wrn ("empty sample list");
		return false;
	}

	if (isTrained ()) {
		ssi_wrn ("already trained");
		return false;
	}

	_n_streams = samples.getStreamSize ();
	_n_classes = samples.getClassSize ();
	_n_models  = n_models;

	//initialize weights
	ssi_real_t **weights = new ssi_real_t*[n_models];
	for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
		weights[n_model] = new ssi_real_t[_n_classes+1];		
	}

	if (samples.hasMissingData ()) {

		_handle_md = true;

		ISMissingData samples_h (&samples);
		Evaluation eval;
		
		if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
			ssi_print("\nMissing data detected.\n");
		}
		
		//models[0] is featfuse_model, followed by singlechannel_models
		ISMergeDim ffusionSamples (&samples);
		ISMissingData ffusionSamples_h (&ffusionSamples);
		ffusionSamples_h.setStream(0);
		if (!models[0]->isTrained ()) { models[0]->train (ffusionSamples_h, 0); }

		if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
			eval.eval (*models[0], ffusionSamples_h, 0);
			eval.print();
		}
		//dummy weights for fused model
		for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
			weights[0][n_class] = 0.0f;
		}		
		weights[0][_n_classes] = 0.0f;	
		
		for (ssi_size_t n_model = 1; n_model < n_models; n_model++) {
			
			if (!models[n_model]->isTrained ()) {
				samples_h.setStream (n_model - 1);
				models[n_model]->train (samples_h, n_model - 1);
			}

			eval.eval (*models[n_model], samples_h, n_model - 1);

			if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
				eval.print();
			}

			for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
				weights[n_model][n_class] = eval.get_class_prob (n_class);
			}		
			weights[n_model][_n_classes] = eval.get_classwise_prob ();	
		}

		//calculate fillers
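		// for each missing stream, pick the not-yet-chosen model with the
		// highest classwise probability; zeroing its weight keeps picks distinct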
		_filler = new ssi_size_t[_n_streams];
		for (ssi_size_t n_fill = 0; n_fill < _n_streams; n_fill++) {
			_filler[n_fill] = 1;
			ssi_real_t filler_weight = weights[1][_n_classes];
			for (ssi_size_t n_model = 2; n_model < n_models; n_model++) {
				if (filler_weight < weights[n_model][_n_classes]) {
					_filler[n_fill] = n_model;
					filler_weight = weights[n_model][_n_classes];
				}
			}
			weights[_filler[n_fill]][_n_classes] = 0.0f;
		}
		if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
			ssi_print("\nfiller:\n");
			for (ssi_size_t n_model = 0; n_model < _n_streams; n_model++) {
				ssi_print("%d ", _filler[n_model]);
			}ssi_print("\n");
		}
	
	}
	else{

		_handle_md = false;

		if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
			ssi_print("\nNo missing data detected.\n");
		}
		ISMergeDim ffusionSamples (&samples);
		if (!models[0]->isTrained ()) { models[0]->train (ffusionSamples, 0); }
		//dummy
		_filler = new ssi_size_t[_n_streams];
		for (ssi_size_t n_fill = 0; n_fill < _n_streams; n_fill++) {
			_filler[n_fill] = 0;
		}
	}

	if (weights) {
		for (ssi_size_t n_model = 0; n_model < _n_models; n_model++) {
			delete[] weights[n_model];
		}
		delete[] weights;
		weights = 0;
	}

	return true;
}
Example #23
bool ex_model(void *arg) {

	Trainer::SetLogLevel (SSI_LOG_LEVEL_DEBUG);

	ssi_size_t n_classes = 4;
	ssi_size_t n_samples = 50;
	ssi_size_t n_streams = 1;
	ssi_real_t train_distr[][3] = { { 0.25f, 0.25f, 0.1f }, { 0.25f, 0.75f, 0.1f }, { 0.75f, 0.75f, 0.1f }, { 0.75f, 0.75f, 0.1f } };
	ssi_real_t test_distr[][3] = { { 0.5f, 0.5f, 0.5f } };
	SampleList strain;
	SampleList sdevel;
	SampleList stest;
	ModelTools::CreateTestSamples (strain, n_classes, n_samples, n_streams, train_distr, "user");	
	ModelTools::CreateTestSamples (sdevel, n_classes, n_samples, n_streams, train_distr, "user");	
	ModelTools::CreateTestSamples (stest, 1, n_samples * n_classes, n_streams, test_distr, "user");	
	ssi_char_t string[SSI_MAX_CHAR];	
	for (ssi_size_t n_class = 1; n_class < n_classes; n_class++) {
		ssi_sprint (string, "class%02d", n_class);
		stest.addClassName (string);
	}
	
	// train svm
	{
		SVM *model = ssi_create(SVM, 0, true);
		model->getOptions()->seed = 1234;
		Trainer trainer(model);
		trainer.train(strain);
		trainer.save("svm");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load(trainer, "svm");
		Evaluation eval;
		eval.eval(&trainer, sdevel);
		eval.print();

		trainer.cluster(stest);
		ModelTools::PlotSamples(stest, "svm (internal normalization)", ssi_rect(650, 0, 400, 400));
	}

	// train knn
	{
		KNearestNeighbors *model = ssi_create(KNearestNeighbors, 0, true);
		model->getOptions()->k = 5;
		//model->getOptions()->distsum = true;
		Trainer trainer (model);
		trainer.train (strain);
		trainer.save ("knn");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load (trainer, "knn");			
		Evaluation eval;
		eval.eval (&trainer, sdevel);
		eval.print ();

		trainer.cluster (stest);
		ModelTools::PlotSamples(stest, "knn", ssi_rect(650, 0, 400, 400));
	}

	// train naive bayes
	{
		NaiveBayes *model = ssi_create(NaiveBayes, 0, true);
		model->getOptions()->log = true;
		Trainer trainer (model);
		trainer.train (strain);
		trainer.save ("bayes");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load (trainer, "bayes");			
		Evaluation eval;
		eval.eval (&trainer, sdevel);
		eval.print ();

		trainer.cluster (stest);
		ModelTools::PlotSamples(stest, "bayes", ssi_rect(650, 0, 400, 400));
	}

	// training
	{
		LDA *model = ssi_create(LDA, "lda", true);
		Trainer trainer (model);
		trainer.train (strain);

		model->print();
		trainer.save ("lda");
	}

	// evaluation
	{
		Trainer trainer;
		Trainer::Load (trainer, "lda");
		Evaluation eval;
		eval.eval (&trainer, sdevel);
		eval.print ();

		trainer.cluster (stest);
		ModelTools::PlotSamples(stest, "lda", ssi_rect(650, 0, 400, 400));
	}

	ssi_print ("\n\n\tpress a key to contiue\n");
	getchar ();

	return true;
}
Example #24
bool WeightedMajorityVoting::train (ssi_size_t n_models,
	IModel **models,
	ISamples &samples) {

	if (samples.getSize () == 0) {
		ssi_wrn ("empty sample list");
		return false;
	}

	if (samples.getStreamSize () != n_models) {
		ssi_wrn ("#models (%u) differs from #streams (%u)", n_models, samples.getStreamSize ());
		return false;
	}

	if (isTrained ()) {
		ssi_wrn ("already trained");
		return false;
	}  

	_n_streams = samples.getStreamSize ();
	_n_classes = samples.getClassSize ();
	_n_models  = n_models;

	_weights = new ssi_real_t*[n_models];
	for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
		_weights[n_model] = new ssi_real_t[_n_classes+1];		
	}

	if (samples.hasMissingData ()) {
		ISMissingData samples_h (&samples);
		Evaluation eval;
		for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
			if (!models[n_model]->isTrained ()) {
				samples_h.setStream (n_model);
				models[n_model]->train (samples_h, n_model);
			}
			eval.eval (*models[n_model], samples_h, n_model);
			for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
				_weights[n_model][n_class] = eval.get_class_prob (n_class);
			}		
			_weights[n_model][_n_classes] = eval.get_classwise_prob ();	
		}
	}
	else{
		Evaluation eval;
		for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
			if (!models[n_model]->isTrained ()) { models[n_model]->train (samples, n_model); }
			eval.eval (*models[n_model], samples, n_model);
			for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
				_weights[n_model][n_class] = eval.get_class_prob (n_class);
			}		
			_weights[n_model][_n_classes] = eval.get_classwise_prob ();
		}		
	}

	if (ssi_log_level >= SSI_LOG_LEVEL_DEBUG) {
		ssi_print("\nClassifier Weights: \n");
		for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
			for (ssi_size_t n_class = 0; n_class < _n_classes; n_class++) {
				ssi_print ("%f ", _weights[n_model][n_class]);
			}
			ssi_print ("%f\n", _weights[n_model][_n_classes]);
		}
	}

	return true;
}
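
For context, a minimal sketch (not the library's actual forward pass) of how
the weights trained above might be used at prediction time: each model votes
for its predicted class, and votes are summed with the per-class weights:

static ssi_size_t weighted_vote (ssi_size_t n_models,
	const ssi_size_t *votes,
	ssi_real_t **weights,
	ssi_size_t n_classes) {

	ssi_size_t best_class = 0;
	ssi_real_t best_sum = -1.0f;
	for (ssi_size_t n_class = 0; n_class < n_classes; n_class++) {
		ssi_real_t sum = 0.0f;
		for (ssi_size_t n_model = 0; n_model < n_models; n_model++) {
			if (votes[n_model] == n_class) {
				sum += weights[n_model][n_class];
			}
		}
		if (sum > best_sum) {
			best_sum = sum;
			best_class = n_class;
		}
	}
	return best_class;
}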
Example #25
inline void Type_::Give(Evaluation & theEvaluation) {
    Lexicon theLexicon;
    theEvaluation.GetTranslator().GiveElements(theLexicon);
    theEvaluation.TakeQuotedProducer(theLexicon);
}