Exemplo n.º 1
0
// Flood-fill style depth-first search: marks every cell reachable from
// (x, y) through non-zero cells of `inp` as visited in `hit`.
// Bounds come from the globals `w` (width) and `h` (height).
void dfs(int x, int y, vector<vector<int>> &inp, vector < vector <bool>> &hit)
{
	// Stop at the grid edge, on empty (0) cells, or on already-visited cells.
	if (x < 0 || w <= x || y < 0 || h <= y || inp[y][x] == 0) { return; }
	if (hit[y][x]) return;
	hit[y][x] = true;
	// BUG FIX: the four recursive calls invoked `srch`, a different routine
	// with the same signature; a DFS must recurse into itself to fill the
	// connected component.
	dfs(x + 1, y, inp, hit);
	dfs(x - 1, y, inp, hit);
	dfs(x, y + 1, inp, hit);
	dfs(x, y - 1, inp, hit);
}
Exemplo n.º 2
0
int main(int argc, char *argv[]){
	
	int chk;						//エラー検出用変数 0でなければエラー
	int s_org = 0;					//検索対象テキスト指定のオプション
	int s_find = 0;					//検索文字列指定のオプション
	
	for(int i = 1; i < argc; i++){	//1つ目のパラメータは実行ファイルなので弾く
//	ループ毎に1つずつコマンドラインオプションを取得する:argchk(argv[i])
		if (argchk(argv[i]) == 15){			//すでに検出されたものでないかチェック
			if (s_org == 0){
				s_org = i;					//-oがあった配列番号
			}else{
				chk = 1;
			}
		}
		if (argchk(argv[i]) == 6){
			if (s_find == 0){
				s_find = i;					//-fがあった配列番号
			}else{
				chk = 1;
			}
		}
	}
	
//	printf("s_org:%d\ns_find:%d\n", s_org, s_find);
	if (chk == 0) {
		chk = srch(argv[s_org+1], argv[s_find+1]);		//検索
	}
	errmsg(chk);					//エラーメッセージを表示
	return 0;
}
Exemplo n.º 3
0
// Entry point: prints the size of math::vec3, then dispatches on a single
// one-letter mode argument ("n" = normal run, "m" = map, "s" = search).
int main(int argc, const char ** argv) {

	const double dt = 0.01;	// fixed simulation time step

	//int N = atoi(argv[1]);

	printf("%i\n",(int)sizeof(math::vec3));

	// Exactly one argument (the mode) is required.
	if(argc != 2) {
		printf("usage: %s <mode>\n",argv[0]);
		exit(0);
	}

	const char *mode = argv[1];

	if(strcmp(mode,"n")==0) {
		normal(5000, dt);
	} else if(strcmp(mode,"m")==0) {
		map();
	} else if(strcmp(mode,"s")==0) {
		srch();
	} else {
		printf("invalid mode\n");
	}

	//b->att_->write();
}
Exemplo n.º 4
0
/*! \section example33 Example 33: Oscillating Search in very high-dimensional feature selection.

Very high-dimensional feature selection in text categorization, with 
dimensionality in the order of 10000 or 100000.
The standard approach is BIF, yet we show here that a non-trivial
search procedure (OS) can be feasible. Here OS is applied in its
fastest form (delta=1), initialized by means of BIF. We use Multinomial
Bhattacharyya distance as the feature selection criterion (it has been
shown capable of overperforming traditional tools like Information
Gain etc., cf. Novovicova et al., LNCS 4109, 2006). 
Randomly sampled 50% of data is used for multinomial
model parameter estimation to be used in the actual feature selection
process, another (disjunct) 40% of data is randomly sampled for testing. 
The selected subset is eventually used for validation; multinomial Naive Bayes
classifier is trained on the training data on the selected subset
and classification accuracy is finally estimated on the test data.
*/
int main()
{
	try{
	// Local type aliases wiring the FST3 template components together
	// (numeric types, data splitting/access, criterion, classifier, evaluator).
	typedef double RETURNTYPE; 	typedef double DATATYPE;  typedef double REALTYPE;
	typedef unsigned int IDXTYPE;  typedef unsigned int DIMTYPE;  typedef short BINTYPE;
	typedef FST::Subset<BINTYPE, DIMTYPE> SUBSET;
	typedef FST::Data_Intervaller<std::vector<FST::Data_Interval<IDXTYPE> >,IDXTYPE> INTERVALLER;
	typedef boost::shared_ptr<FST::Data_Splitter<INTERVALLER,IDXTYPE> > PSPLITTER;
	typedef FST::Data_Splitter_RandomRandom<INTERVALLER,IDXTYPE,BINTYPE> SPLITTERRR;
	//typedef FST::Data_Accessor_Splitting_MemTRN<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for TRN data format
	typedef FST::Data_Accessor_Splitting_MemARFF<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for ARFF data format
	typedef FST::Criterion_Multinomial_Bhattacharyya<RETURNTYPE,DATATYPE,REALTYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR> BHATTMULTINOMIALDIST;
	typedef FST::Classifier_Multinomial_NaiveBayes<RETURNTYPE,DATATYPE,REALTYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR> CLASSIFIERMULTINOMIAL;
	typedef FST::Sequential_Step_Straight<RETURNTYPE,DIMTYPE,SUBSET,BHATTMULTINOMIALDIST> EVALUATOR;

		std::cout << "Starting Example 33: Oscillating Search in very high-dimensional feature selection..." << std::endl;
	// randomly sample 50% of data for training and randomly sample (disjunct) 40% for independent testing of final classification performance 
		PSPLITTER dsp_outer(new SPLITTERRR(1, 50, 40)); // (there will be one outer randomized split only)
	// do not scale data
		boost::shared_ptr<FST::Data_Scaler<DATATYPE> > dsc(new FST::Data_Scaler_void<DATATYPE>());
	// set-up data access
		boost::shared_ptr<std::vector<PSPLITTER> > splitters(new std::vector<PSPLITTER>); splitters->push_back(dsp_outer);
		boost::shared_ptr<DATAACCESSOR> da(new DATAACCESSOR("data/reuters_apte.arff",splitters,dsc));
		da->initialize();
	// initiate access to split data parts
		da->setSplittingDepth(0); if(!da->getFirstSplit()) throw FST::fst_error("50/40 random data split failed.");
	// initiate the storage for subset to-be-selected
		boost::shared_ptr<SUBSET> sub(new SUBSET(da->getNoOfFeatures()));
	// set-up multinomial Bhattacharyya distance criterion
		boost::shared_ptr<BHATTMULTINOMIALDIST> dmultinom(new BHATTMULTINOMIALDIST);
		dmultinom->initialize(da); // (initialization = multinomial model parameter estimation on training data)
	// set-up individual feature ranking to serve as OS initialization
		FST::Search_BIF<RETURNTYPE,DIMTYPE,SUBSET,BHATTMULTINOMIALDIST> srch_bif;
	// set-up the standard sequential search step object (option: hybrid, ensemble, etc.)
		boost::shared_ptr<EVALUATOR> eval(new EVALUATOR);
	// set-up the Oscillating Search procedure in its fastest setting
		FST::Search_OS<RETURNTYPE,DIMTYPE,SUBSET,BHATTMULTINOMIALDIST,EVALUATOR> srch(eval);
		srch.set_delta(1);
	// target subset size must be set because a) Bhattacharyya is monotonous with respect to subset size,
	// b) in very-high-dimensional problem d-optimizing search is not feasible due to search complexity
		DIMTYPE target_subsize=500;
	// run the search - first find the initial subset by means of BIF, then improve it by means of OS
		std::cout << "Feature selection setup:" << std::endl << *da << std::endl << srch_bif << std::endl << srch << std::endl << *dmultinom << std::endl << std::endl;
		RETURNTYPE critval_train, critval_test;
		if(!srch_bif.search(target_subsize,critval_train,sub,dmultinom,std::cout)) throw FST::fst_error("Search (BIF) not finished.");
		std::cout << std::endl << "Initialization result: " << std::endl << *sub << "Criterion value=" << critval_train << std::endl << std::endl;
		if(!srch.search(target_subsize,critval_train,sub,dmultinom,std::cout)) throw FST::fst_error("Search (OS) not finished.");
		std::cout << std::endl << "Search result: " << std::endl << *sub << "Criterion value=" << critval_train << std::endl;
	// (optionally) validate result by estimating Naive Multinomial Bayes classifier accuracy on selected feature sub-space on independent test data
		boost::shared_ptr<CLASSIFIERMULTINOMIAL> cmultinom(new CLASSIFIERMULTINOMIAL);
		cmultinom->initialize(da);
		cmultinom->train(da,sub);
		cmultinom->test(critval_test,da);
		std::cout << "Validated Multinomial NaiveBayes accuracy=" << critval_test << std::endl << std::endl;
	}
	// FST errors carry an extra error code; anything else is reported generically.
	catch(FST::fst_error &e) {std::cerr<<"FST ERROR: "<< e.what() << ", code=" << e.code() << std::endl;}
	catch(std::exception &e) {std::cerr<<"non-FST ERROR: "<< e.what() << std::endl;}
	return 0;
}
Exemplo n.º 5
0
extern "C" void findclass(const char name[]) {
  Command c("findclass");
  if (name != NULL) {
    tty->print_cr("Finding class %s -> ", name);
    FindClassObjectClosure srch(name);
    Universe::heap()->permanent_object_iterate(&srch);
  }
}
Exemplo n.º 6
0
// Child worker loop: each round waits for the parent's go-ahead on `sp`,
// runs one search pass over `input`/`words`, and signals completion on
// `sp_c`. The loop terminates once sem_getvalue() reports sp_c >= 1.
int hijo(char * input, sem_t * sp, char * words, sem_t * sp_c){
	int val;
	// val starts at -1 so the body runs at least once before the first
	// sem_getvalue() reading of sp_c (done as the loop's update step).
	for(val=-1 ;val<1 ;sem_getvalue(sp_c,&val)){
		sem_wait(sp);		// wait for permission to work
		srch(input,words);	// presumably searches `words` in `input` — defined elsewhere
		sem_post(sp_c);		// announce this round is finished
		sem_wait(sp);		// NOTE(review): second wait per round — confirm the parent posts `sp` twice per cycle
	}
	return 0;
}
Exemplo n.º 7
0
//---------------------------------------------------------------------------
//enum windows proc used to step through window titles
BOOL __stdcall TPKListWindows(HWND hWnd, LPARAM lparam)
{

	TPushKeys* caller=(TPushKeys*)lparam;

	//get window title
	char buf[255];
	GetWindowText(hWnd,buf,255);

	CString srch(buf);

	if (lstrlen(buf)==0)
	{
		return true;
	}


	//do match
	switch(caller->fWinMatch)
	{
		//exact match required (case sensitive)
	case wmExactMatch:
		if (srch==caller->fWindowTitle)
		{
			caller->TargetWindow=hWnd;
			return false;
		}
		break;

		//only starting characters need to match (case insensitive)
	case wmStartMatch:
		if (srch.UpperCase().Pos(caller->fWindowTitle.UpperCase())==1)
		{
			caller->TargetWindow=hWnd;
			return false;
		}
		break;

		//any sub string ok (case insensitive)
	case wmPartialMatch:
		if (srch.UpperCase().Pos(caller->fWindowTitle.UpperCase())!=0)
		{
			caller->TargetWindow=hWnd;
			return false;
		}
		break;
	}


	//continue looking
	return true;

}
Exemplo n.º 8
0
bool FragmentObject::find(const XMLCh* s,std::string&)  {
	bool retval = false;
	if (s != nullptr && fragment != nullptr) {
		u_str srch(pcu(s));
		u_str doc; 
		XML::Manager::parser()->writenode(fragment,doc);
		retval = doc.find(srch) != string::npos;
	} else {
		retval = true;
	}
	return retval;
}
Exemplo n.º 9
0
/*! A log-likelihood FFFear search is performed for the target in the given map.
  \param resultscr The best scores.
  \param resultrot The best rotations.
  \param resulttrn The best translations.
  \param xmap The map to search.
  \param rtops The oprientations to search. */
void LLK_map_target::search( clipper::Xmap<float>& resultscr, clipper::Xmap<int>& resultrot, clipper::Xmap<int>& resulttrn, const clipper::Xmap<float>& xmap, const std::vector<clipper::RTop_orth>& rtops ) const
{
  // set up results: result maps share the search map's symmetry/cell/grid
  const clipper::Spacegroup&    spgr = xmap.spacegroup();
  const clipper::Cell&          cell = xmap.cell();
  const clipper::Grid_sampling& grid = xmap.grid_sampling();
  resultscr.init( spgr, cell, grid );
  resultrot.init( spgr, cell, grid );
  resulttrn.init( spgr, cell, grid );
  resultscr = 1.0e20;  // sentinel "worse than anything"; lower scores are better

  // now search for ML target in each orientation in turn
  clipper::Xmap<float> resultp1( clipper::Spacegroup::p1(), cell, grid );
  clipper::Xmap<float>::Map_reference_index i1(resultp1);
  clipper::Xmap<float>::Map_reference_coord ix(resultscr);

  // set up z scoring: one preliminary search (first orientation) yields the
  // score distribution used to normalize all later scores
  clipper::FFFear_fft<float> srch( xmap );
  clipper::NX_operator nxop( xmap, target, rtops[0] );
  srch( resultp1, target, weight, nxop );
  clipper::Map_stats zstats( resultp1 );

  // loop over orientations
  for ( int op = 0; op < rtops.size(); op++ ) {
    // do the fffear search for this orientation
    clipper::NX_operator nxop( xmap, target, rtops[op].inverse() );
    srch( resultp1, target, weight, nxop );

    // store best scores: keep the z-score, orientation index and grid
    // translation wherever this orientation beats the best seen so far
    for ( i1 = resultp1.first(); !i1.last(); i1.next() ) {
      ix.set_coord( i1.coord() );
      float score = ( resultp1[i1] - zstats.mean() ) / zstats.std_dev();
      if ( score < resultscr[ix] ) {
	resultscr[ix] = score;
	resultrot[ix] = op;
	resulttrn[ix] = grid.index( i1.coord() );
      }
    }
  }
}
Exemplo n.º 10
0
	/**
		@brief	Search for files inside a folder
		@param	tszPath		path to search in
	 */
	void	searchFile( const _TCHAR* tszPath )
	{
		// Remember how many entries the list held before this call, so the
		// per-extension search can distinguish newly added entries.
		size_t nStart = m_vecFilelist.size();

		// m_tszExt presumably holds a delimited list of extensions that
		// CStringSearch tokenizes — TODO confirm against CStringSearch.
		CStringSearch	srch(m_tszExt);
		tstring	tstrExt;

		// Run one directory scan per extension token.
		while( srch.GetNext( tstrExt ) )
		{
			searchFileExt( tszPath, tstrExt.c_str(), nStart );
			
		}
	}
Exemplo n.º 11
0
/*************************************
 * Replace
 *************************************/
// Replaces every occurrence of `search` with `repl` in this UTF8 string.
// The UTF8 buffer is temporarily re-wrapped as an 8-bit SHVString8 (its raw
// buffer pointer appears to be handed over via Encapsulate — no copy), the
// 8-bit Replace does the work, and the resulting buffer is handed back.
void SHVStringUTF8::Replace(const SHVStringUTF8C& search, const SHVStringUTF8C& repl)
{
SHVString8 self;
SHVString8C srch(search.GetBufferConst());	// non-owning view of the pattern
SHVString8C rpl(repl.GetBufferConst());		// non-owning view of the replacement

	self = SHVStringBuffer8::Encapsulate(Buffer);	// move our raw buffer into `self`
	Buffer = NULL;

	self.Replace(srch,rpl);
	*this = SHVStringBufferUTF8::Encapsulate(self.Buffer);	// take the result back
	self.Buffer = NULL;	// detach so `self`'s destructor cannot free the moved buffer
}
// Reads several test cases; for each case reads m string fragments, builds
// the pairwise-overlap table `save`, and runs the global recursive `srch`
// to find the minimum superstring length, which is printed.
// Relies on globals declared elsewhere: n, m, min, segment[], save[][].
// BUG FIX: `void main()` is ill-formed C++ — main must return int.
int main()
{
	int i,l1,l2,len,j,k,l,flag,flag2;
	cin >> n ;
	while(n--)
	{
		cin >> m ;
		min = 999999999;
		for( i = 0 ; i < m ; i++ )       // read the input fragments
		{
			cin >> segment[i].ch ;
			segment[i].len = strlen(segment[i].ch);
			segment[i].flag = 0 ; 
		}
		memset(save,0,sizeof(save));
		// save[j][i] = extra length added when fragment i follows fragment j,
		// i.e. len(i) minus the longest overlap of j's suffix with i's prefix.
		for( i = 0 ; i < m ; i++ )
			for( j = 0 ; j < m ; j++)
			{
				l1 = segment[i].len ;
				l2 = segment[j].len ;
				len = (l1 > l2)?l2:l1;
				flag2 = 0 ;
				for( k = len ; k >= 0 ; k-- )	// try overlaps from longest down to none
				{
					flag = 0 ;	
					for( l = 0 ; l < k ; l++ )
					{
						if ( segment[i].ch[l] != segment[j].ch[l2-k+l] )
						{
							flag = 1 ;
							break;
						}
					}
					if ( flag ==0 )
					{
						save[j][i] = l1-k ;
						break;
					}
				}
			}
		for( i = 0 ; i < m ; i++)     // try each fragment as the first one
		{
			segment[i].flag = 1 ;
			srch(0,segment[i].len,i);
			segment[i].flag = 0 ;
		}
		cout << min << endl;         // print the minimum total length
	}
	return 0;
}
Exemplo n.º 13
0
// Prompts (commented out) for a filename, then recursively searches /home
// for it via the global srch(); the global `flag` records whether any
// match was printed.
int main()
{
       char str[50]="/home";

      // printf("Enter the name of the file to be searched: ");
       scanf("%s",srchstr);     /* NOTE(review): unbounded %s read into the
                                   global buffer — consider a width limit */
//printf("hi//..\t\t");
       srch(str);               /* walk the tree rooted at /home */

      if(flag==0)
       {
               printf("File not found.\n");
       }
       return 0;        /* BUG FIX: was `return;` — a value is required in int main */
}
Exemplo n.º 14
0
// Reads an array size and its elements, echoes the array back, then asks
// for a number and searches the array for it via srch().
int main()
{
    int values[N];
    int count;
    int target;

    printf("vvedite razmer massiva:");
    scanf("%d",&count);
    universalClear();

    arr_in(values,count);
    universalClear();
    arr_out(values,count);

    printf("vvedite chislo dlia poiska :");
    scanf("%d",&target);

    srch(values,count,target);
    return 0;
}
Exemplo n.º 15
0
/*
 * Enter admode() to search a specific addressing mode table
 * for a match. Return the addressing value on a match or
 * -1 for no match.
 */
int
admode(struct adsym *sp)
{
        char *str;
        int idx;

        unget(getnb());
        /* the table is terminated by an entry whose a_str is empty */
        for (idx = 0; *(str = &sp[idx].a_str[0]) != '\0'; idx++) {
                if (srch(str))
                        return(sp[idx].a_val);
        }
        return(-1);
}
// Depth-first search for the minimum total superstring length.
	// floor: recursion depth = number of fragments already appended
	// total: current accumulated length
	// last:  index of the most recently appended fragment
void srch(int floor,int total,int last)  
{
	int i;
	if ( total >= min ) return ;	// prune: cannot beat the best found so far
	if ( floor == m-1 ) 		// all m fragments placed (floor counts from 0)
	{
		min = total;
		return ;
	}
	for( i = 0 ;i < m; i++ )
	{
		if (segment[i].flag == 1 ) continue ;	// fragment already used
		segment[i].flag = 1 ;
		// save[last][i] = extra length of appending fragment i after `last`
		srch(floor+1,total+save[last][i],i) ;
		segment[i].flag = 0 ;	// backtrack
	}
}
Exemplo n.º 17
0
/* Recursively walks directory `s`, printing the path of every entry whose
 * name equals the global `srchstr`. Sets the global `flag` on the first
 * match so main() can report "not found" otherwise. */
void srch(char s[20])
{
struct dirent *d;
DIR *dir;
int c;
char ss[256]="\0";

if((dir=opendir(s))==NULL)
{
return;        /* non-directories / unreadable entries are silently skipped */
}
       while(d=readdir(dir))
       {
//printf("Path is : %s/%s\n",s,d->d_name);

               if(!strcmp(d->d_name,srchstr))
               {
                       if(flag==0)
                       {
                               printf("File found.\n");
                               flag=1;
                       }
                      // printf("Inode number is : %d\n",d->d_ino);
                       printf("Path is : %s/%s\n",s,d->d_name);
               }

               /* recurse into everything except the "." and ".." entries */
               if(strcmp(d->d_name,".") && strcmp(d->d_name,".."))
               {
                       /* BUG FIX: strcpy/strcat could overflow ss[256] on deep
                          paths; build the child path with a bounded snprintf */
                       snprintf(ss,sizeof(ss),"%s/%s",s,d->d_name);

                       srch(ss);
               }

       }

       if((c=closedir(dir))==-1)
       {
               printf("Unable to close file.\n");
               exit(1);
       }
       return;
}
Exemplo n.º 18
0
// Returns the cached image for (file, index), creating and inserting it on a
// cache miss; on a hit the refcount is bumped. The concrete ImageBase subtype
// is chosen from the file extension. Returns NULL if the file has no extension.
ImageBase* AddCacheImage(const CMString& file, int index)
{
	// Cache key: hash of "<file>#<index>".
	CMString tmpfile(file); tmpfile.AppendFormat(_T("#%d"), index);
	unsigned id = mir_hash(tmpfile.c_str(), tmpfile.GetLength() * sizeof(TCHAR));

	WaitForSingleObject(g_hMutexIm, 3000);

	ImageBase srch(id);
	ImageBase *img = g_imagecache.find(&srch);
	if (img == NULL) {
		int ind = file.ReverseFind('.');
		if (ind == -1) {
			// BUG FIX: this early exit previously returned without releasing
			// g_hMutexIm, leaving the cache mutex held forever.
			ReleaseMutex(g_hMutexIm);
			return NULL;
		}

		CMString ext = file.Mid(ind+1);
		ext.MakeLower();
		// opt.HQScaling selects the high-quality image loader where available.
		if (ext == _T("dll") || ext == _T("exe"))
			img = opt.HQScaling ? (ImageBase*)new ImageType(id, file, index, icoDll) : (ImageBase*)new IconType(id, file, index, icoDll);
		else if (ext == _T("ico"))
			img = opt.HQScaling ? (ImageBase*)new ImageType(id, file, 0, icoFile) : (ImageBase*)new IconType(id, file, 0, icoFile);
		else if (ext == _T("icl"))
			img = opt.HQScaling ? (ImageBase*)new ImageType(id, file, index, icoIcl) : (ImageBase*)new IconType(id, file, index, icoIcl);
		else if (ext == _T("gif"))
			img = new ImageType(id, file, NULL);
		else if (fei == NULL || ext == _T("tif") || ext == _T("tiff"))
			img = new ImageType(id, file, NULL);
		else
			img = opt.HQScaling ? (ImageBase*)new ImageType(id, file, NULL) : (ImageBase*)new ImageFType(id, file);

		g_imagecache.insert(img);

		// Arm the cache-expiry timer once, from the main thread.
		if (timerId == 0) {
			timerId = 0xffffffff;
			CallFunctionAsync(sttMainThreadCallback, NULL);
		}
	}
	else img->AddRef();

	ReleaseMutex(g_hMutexIm);

	return img;
}
Exemplo n.º 19
0
// Reads 17 lines of a textual board drawing; every even-numbered line
// carries cell payload taken from columns 2, 6, 10, ... (cells appear 4
// characters apart) into row j of the global `map`, then the global
// srch()/print() pair runs. Assumes each text row contains no whitespace
// (scanf %s stops at blanks) — TODO confirm against the input format.
int main()
{
	int i = 0,j,k;
	memset(w,0,sizeof(w));	// reset global state — presumably white/black boards; verify
	memset(b,0,sizeof(b));
	char c[50];
	for(i = 1; i <= 17; i++)
	{
		int tmp = 2;	// column of the first cell payload in the text row
		scanf("%s",c);
		j = i/2;	// board row (odd text rows are separator lines)	
		if( i%2 == 0)
			for(k = 1; k <= 8; k++)
			{
				map[j][k] = c[tmp];
				tmp += 4;	// advance to the next cell's column
			}
	}
	srch();
	print();
return 0;
}
Exemplo n.º 20
0
/* Evaluate the (expr).tag construct: look up field `s` in the complex type
 * attached to the value of n->left, then produce in `r` either the field's
 * address (format 'a') or the field's value loaded through cormap. */
void
odot(Node *n, Node *r)
{
	char *s;
	Type *t;
	Node res;
	uvlong addr;

	s = n->sym->name;
	if(s == 0)
		fatal("dodot: no tag");

	/* Evaluate the left operand; it must carry a complex type (comt)
	 * and its value must be an integer (used as a base address). */
	expr(n->left, &res);
	if(res.comt == 0)
		error("no type specified for (expr).%s", s);

	if(res.type != TINT)
		error("pointer must be integer for (expr).%s", s);

	/* find the tag's type entry within the complex type */
	t = srch(res.comt, s);
	if(t == 0)
		error("no tag for (expr).%s", s);

	/* Propagate types */
	if(t->type) 
		r->comt = t->type->lt;
	
	addr = res.ival+t->offset;
	if(t->fmt == 'a') {
		/* 'a' format: the result is the address itself, no load */
		r->op = OCONST;
		r->fmt = 'a';
		r->type = TINT;
		r->ival = addr;
	}
	else 
		indir(cormap, addr, t->fmt, r);

}
Exemplo n.º 21
0
/*! \section example61 Example 61: Feature selection that respects pre-specified feature weights.

In many applications it is desirable to optimize feature subsets not only with respect
to the primary objective (e.g., decision rule accuracy), but also with respect
to additional factors like known feature acquisition cost. In many cases there might be
only negligible difference in discriminatory ability among several features, while
the cost of measuring their value may differ a lot. In such a case it is certainly
better to select the cheaper feature. In other scenarios it might be even advantageous
to trade a minor degradation of classifcation accuracy for substantial saving in
measurement acquisition cost. For such cases FST3 implements a mechanism that
allows to control the feature accuracy vs. feature cost trade-off. It is made possible 
through result tracking and subsequent selection of alternative solution so as to minimize
the sum of pre-specified feature weights. The lower-weight solution is selected
from the pool of all known solutions that differ from the best one by less than
a user-specifed margin (permitted primary criterion value difference from the known
maximum value). In this example we illustrate how to add the respective mechanism to 
standard wrapper based feature selection. Here we select features so as to maximize 
3-Nearest Neighbor accuracy; then several lower-weight solutions are identified
and validated, for various margin values.
*/
int main()
{
	try{
	// Local type aliases wiring the FST3 template components together
	// (numeric types, data splitting/access, wrapper criterion, weight
	// criterion, evaluator and result tracker).
	typedef double RETURNTYPE; 	typedef double DATATYPE;  typedef double REALTYPE;
	typedef unsigned int IDXTYPE;  typedef unsigned int DIMTYPE;  typedef short BINTYPE;
	typedef FST::Subset<BINTYPE, DIMTYPE> SUBSET;
	typedef FST::Data_Intervaller<std::vector<FST::Data_Interval<IDXTYPE> >,IDXTYPE> INTERVALLER;
	typedef boost::shared_ptr<FST::Data_Splitter<INTERVALLER,IDXTYPE> > PSPLITTER;
	typedef FST::Data_Splitter_CV<INTERVALLER,IDXTYPE> SPLITTERCV;
	typedef FST::Data_Splitter_5050<INTERVALLER,IDXTYPE> SPLITTER5050;
	typedef FST::Data_Accessor_Splitting_MemTRN<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for TRN data format
	//typedef FST::Data_Accessor_Splitting_MemARFF<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for ARFF data format
	typedef FST::Distance_Euclid<DATATYPE,DIMTYPE,SUBSET> DISTANCE;
	typedef FST::Classifier_kNN<RETURNTYPE,DATATYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR,DISTANCE> CLASSIFIERKNN;
	typedef FST::Criterion_Wrapper<RETURNTYPE,SUBSET,CLASSIFIERKNN,DATAACCESSOR> WRAPPERKNN;
	typedef FST::Criterion_Sum_Of_Weights<RETURNTYPE,DIMTYPE,SUBSET> WEIGHCRIT;
	typedef FST::Criterion_Negative<WEIGHCRIT,RETURNTYPE,SUBSET> NEGATIVECRIT;
	typedef FST::Sequential_Step_Straight<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN> EVALUATOR;
	typedef FST::Result_Tracker_Regularizer<RETURNTYPE,IDXTYPE,DIMTYPE,SUBSET,NEGATIVECRIT> TRACKER;

		std::cout << "Starting Example 61: Feature selection that respects pre-specified feature weights..." << std::endl;
	// keep second half of data for independent testing of final classification performance
		PSPLITTER dsp_outer(new SPLITTER5050());
	// in the course of search use the first half of data by 3-fold cross-validation in wrapper FS criterion evaluation
		PSPLITTER dsp_inner(new SPLITTERCV(3));
	// do not scale data
		boost::shared_ptr<FST::Data_Scaler<DATATYPE> > dsc(new FST::Data_Scaler_void<DATATYPE>());
	// set-up data access
		boost::shared_ptr<std::vector<PSPLITTER> > splitters(new std::vector<PSPLITTER>); 
		splitters->push_back(dsp_outer); splitters->push_back(dsp_inner);
		boost::shared_ptr<DATAACCESSOR> da(new DATAACCESSOR("data/speech_15.trn",splitters,dsc));
		da->initialize();
	// initiate access to split data parts
		da->setSplittingDepth(0); if(!da->getFirstSplit()) throw FST::fst_error("50/50 data split failed.");
		da->setSplittingDepth(1); if(!da->getFirstSplit()) throw FST::fst_error("3-fold cross-validation failure.");
	// initiate the storage for subset to-be-selected
		boost::shared_ptr<SUBSET> sub(new SUBSET(da->getNoOfFeatures()));  sub->deselect_all();
	// set-up 3-Nearest Neighbor classifier based on Euclidean distances
		boost::shared_ptr<CLASSIFIERKNN> cknn(new CLASSIFIERKNN); cknn->set_k(3);
	// wrap the 3-NN classifier to enable its usage as FS criterion (criterion value will be estimated by 3-fold cross-val.)
		boost::shared_ptr<WRAPPERKNN> wknn(new WRAPPERKNN);
		wknn->initialize(cknn,da);
	// set-up the standard sequential search step object (option: hybrid, ensemble, threaded, etc.)
		boost::shared_ptr<EVALUATOR> eval(new EVALUATOR);
	// set-up Sequential Forward Floating Selection search procedure
		FST::Search_SFFS<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN,EVALUATOR> srch(eval);
		srch.set_search_direction(FST::FORWARD);
	// set-up tracker of intermediate results
		boost::shared_ptr<TRACKER> tracker(new TRACKER);
	// register the result tracker with the used search step object
		eval->enable_result_tracking(tracker);
	// run the search
		std::cout << "Feature selection setup:" << std::endl << *da << std::endl << srch << std::endl << *wknn << std::endl << *tracker << std::endl << std::endl;
		RETURNTYPE critval_train, critval_test;
		if(!srch.search(0,critval_train,sub,wknn,std::cout)) throw FST::fst_error("Search not finished.");
	// (optionally) validate result by estimating kNN accuracy on selected feature sub-space on independent test data
		da->setSplittingDepth(0);
		cknn->train(da,sub);
		cknn->test(critval_test,da);
		if(!wknn->evaluate(critval_train,sub)) throw FST::fst_error("crit call failure.");
		std::cout << "Validated "<<cknn->get_k()<<"-NN accuracy=" << critval_test << ", crit value="<< critval_train << std::endl << std::endl;
	// set-up the secondary criterion to minimize the sum of feature weights
	// (note that the concrete weight values shown here are sample only)
		RETURNTYPE feature_cost[]={1, 1.2, 1, 1.3, 1.02, 2.4, 3.9, 1.2, 7.1, 22, 9.52, 1.08, 3.27, 1.44, 1.04};
		assert(sizeof(feature_cost)/sizeof(RETURNTYPE)==da->getNoOfFeatures());
		boost::shared_ptr<WEIGHCRIT> weightsumcrit(new WEIGHCRIT);
		weightsumcrit->initialize(da->getNoOfFeatures(),feature_cost);
		boost::shared_ptr<NEGATIVECRIT> critminw(new NEGATIVECRIT(weightsumcrit));
	// select final solution among those recorded by tracker (show more alternatives for various margins)
		for(unsigned int i=0; i<10; i++) 
		{
			const RETURNTYPE margin=(double)i*0.005;
			if(!tracker->optimize_within_margin(margin,critval_train,critval_test,sub,critminw)) throw FST::fst_error("tracker2->optimize_within_margin() failed.");
			std::cout << std::endl << "Weight-optimized result (primary criterion margin="<<margin<<"): " << std::endl << *sub << "Criterion value=" << critval_train << std::endl << "Sum of weights=" << -critval_test << std::endl;
		// (optionally) validate result by estimating kNN accuracy on selected feature sub-space on independent test data
			da->setSplittingDepth(0);
			cknn->train(da,sub);
			cknn->test(critval_test,da);
			std::cout << "Validated "<<cknn->get_k()<<"-NN accuracy=" << critval_test << std::endl << std::endl;
		}
	}
	// FST errors carry an extra error code; anything else is reported generically.
	catch(FST::fst_error &e) {std::cerr<<"FST ERROR: "<< e.what() << ", code=" << e.code() << std::endl;}
	catch(std::exception &e) {std::cerr<<"non-FST ERROR: "<< e.what() << std::endl;}
	return 0;
}
Exemplo n.º 22
0
/*! \section example51 Example 51: (DOS) Result regularization using secondary criterion.

It is known that feature selection may over-fit. As in the case of over-trained classifiers,
over-selected feature subsets may generalize poorly. This unwanted effect can lead to
serious degradation of generalization ability, i.e., model or decision-rule behavior 
on previously unknown data. It has been suggested (Raudys: Feature Over-Selection, LNCS 4109, 2006, 
or Somol et al., ICPR 2010) that preferring a subset with slightly-worse-than-maximal criterion
value can actually improve generalization. FST3 makes this possible through result tracking
and subsequent selection of alternative solution by means of secondary criterion maximization.
In this example we show a 3-Nearest Neighbor Wrapper based feature selection process, where
the final result is eventually chosen among a group of solutions close enough to the achieved
maximum, so as to optimize the secondary criterion. The group of solutions to select from is defined 
by means of a user-selected margin value (permitted primary criterion value difference from the known
maximum). In this case we show that even the simplest secondary criterion (mere preference of 
smaller subsets) can improve classifcation accuracy on previously unknown data.
*/
int main()
{
	try{
	// Local type aliases wiring the FST3 template components together
	// (numeric types, data splitting/access, wrapper criterion, subset-size
	// regularization criterion, evaluator and result tracker).
	typedef double RETURNTYPE; 	typedef double DATATYPE;  typedef double REALTYPE;
	typedef unsigned int IDXTYPE;  typedef unsigned int DIMTYPE;  typedef short BINTYPE;
	typedef FST::Subset<BINTYPE, DIMTYPE> SUBSET;
	typedef FST::Data_Intervaller<std::vector<FST::Data_Interval<IDXTYPE> >,IDXTYPE> INTERVALLER;
	typedef boost::shared_ptr<FST::Data_Splitter<INTERVALLER,IDXTYPE> > PSPLITTER;
	typedef FST::Data_Splitter_CV<INTERVALLER,IDXTYPE> SPLITTERCV;
	typedef FST::Data_Splitter_5050<INTERVALLER,IDXTYPE> SPLITTER5050;
	typedef FST::Data_Accessor_Splitting_MemTRN<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for TRN data format
	//typedef FST::Data_Accessor_Splitting_MemARFF<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for ARFF data format
	typedef FST::Distance_Euclid<DATATYPE,DIMTYPE,SUBSET> DISTANCE;
	typedef FST::Classifier_kNN<RETURNTYPE,DATATYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR,DISTANCE> CLASSIFIERKNN;
	typedef FST::Criterion_Wrapper<RETURNTYPE,SUBSET,CLASSIFIERKNN,DATAACCESSOR> WRAPPERKNN;
	typedef FST::Criterion_Subset_Size<RETURNTYPE,SUBSET> CRITSUBSIZE;
	typedef FST::Criterion_Negative<CRITSUBSIZE,RETURNTYPE,SUBSET> NEGATIVECRIT;
	typedef FST::Sequential_Step_Straight<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN> EVALUATOR;
	typedef FST::Result_Tracker_Regularizer<RETURNTYPE,IDXTYPE,DIMTYPE,SUBSET,NEGATIVECRIT> TRACKER;

		std::cout << "Starting Example 51: (DOS) Result regularization using secondary criterion..." << std::endl;
	// keep second half of data for independent testing of final classification performance
		PSPLITTER dsp_outer(new SPLITTER5050());
	// in the course of search use the first half of data by 3-fold cross-validation in wrapper FS criterion evaluation
		PSPLITTER dsp_inner(new SPLITTERCV(3));
	// do not scale data
		boost::shared_ptr<FST::Data_Scaler<DATATYPE> > dsc(new FST::Data_Scaler_void<DATATYPE>());
	// set-up data access
		boost::shared_ptr<std::vector<PSPLITTER> > splitters(new std::vector<PSPLITTER>); 
		splitters->push_back(dsp_outer); splitters->push_back(dsp_inner);
		boost::shared_ptr<DATAACCESSOR> da(new DATAACCESSOR("data/waveform_40.trn",splitters,dsc));
		da->initialize();
	// initiate access to split data parts
		da->setSplittingDepth(0); if(!da->getFirstSplit()) throw FST::fst_error("50/50 data split failed.");
		da->setSplittingDepth(1); if(!da->getFirstSplit()) throw FST::fst_error("3-fold cross-validation failure.");
	// initiate the storage for subset to-be-selected
		boost::shared_ptr<SUBSET> sub(new SUBSET(da->getNoOfFeatures()));  sub->deselect_all();
	// set-up 3-Nearest Neighbor classifier based on Euclidean distances
		boost::shared_ptr<CLASSIFIERKNN> cknn(new CLASSIFIERKNN); cknn->set_k(3);
	// wrap the 3-NN classifier to enable its usage as FS criterion (criterion value will be estimated by 3-fold cross-val.)
		boost::shared_ptr<WRAPPERKNN> wknn(new WRAPPERKNN);
		wknn->initialize(cknn,da);
	// set-up the standard sequential search step object (option: hybrid, ensemble)
		boost::shared_ptr<EVALUATOR> eval(new EVALUATOR);
	// set-up Dynamic Oscillating Search procedure
		FST::Search_DOS<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN,EVALUATOR> srch(eval);
		srch.set_delta(3);
	// set-up the regularizing result tracker
		boost::shared_ptr<TRACKER> tracker(new TRACKER);
	// register the result tracker with the used search step object
		eval->enable_result_tracking(tracker);
	// run the search
		std::cout << "Feature selection setup:" << std::endl << *da << std::endl << srch << std::endl << *wknn << std::endl << *tracker << std::endl << std::endl;
		RETURNTYPE critval_train, critval_test;
		srch.set_output_detail(FST::NORMAL); // set FST::SILENT to disable all text output in the course of search (FST::NORMAL is default)
		if(!srch.search(0,critval_train,sub,wknn,std::cout)) throw FST::fst_error("Search not finished.");
	// (optionally) validate result by estimating kNN accuracy on selected feature sub-space on independent test data
		da->setSplittingDepth(0);
		cknn->train(da,sub);
		cknn->test(critval_test,da);
		std::cout << "Validated "<<cknn->get_k()<<"-NN accuracy=" << critval_test << std::endl << std::endl;
	// set-up the secondary criterion (regularization criterion); in this case to minimize subset size
		boost::shared_ptr<CRITSUBSIZE> critsubsiz(new CRITSUBSIZE); //Criterion_Subset_Size does not need to be initialized
		boost::shared_ptr<NEGATIVECRIT> regulcrit(new NEGATIVECRIT(critsubsiz)); //Criterion_Negative does not need to be initialized
	// select final solution among those recorded by tracker (show more alternatives for various margins)
		tracker->set_output_detail(FST::NORMAL); // set FST::SILENT to disable all text output in the course of search (FST::NORMAL is default)
		for(unsigned int i=1; i<10; i++) 
		{
			RETURNTYPE margin=(double)i*0.001;
			da->setSplittingDepth(1); // necessary with criteria than need access to training data
			if(!tracker->optimize_within_margin(margin,critval_train,critval_test,sub,regulcrit)) throw FST::fst_error("tracker->optimize_within_margin() failed.");
			std::cout << std::endl << "Regularized (margin="<<margin<<") result: " << std::endl << *sub << "Criterion value=" << critval_train << std::endl;
		// (optionally) validate result by estimating kNN accuracy on selected feature sub-space on independent test data
			da->setSplittingDepth(0);
			cknn->train(da,sub);
			cknn->test(critval_test,da);
			std::cout << "Validated "<<cknn->get_k()<<"-NN accuracy=" << critval_test << std::endl << std::endl;
		}
	}
	// FST errors carry an extra error code; anything else is reported generically.
	catch(FST::fst_error &e) {std::cerr<<"FST ERROR: "<< e.what() << ", code=" << e.code() << std::endl;}
	catch(std::exception &e) {std::cerr<<"non-FST ERROR: "<< e.what() << std::endl;}
	return 0;
}
Exemplo n.º 23
0
/*! \section example12t Example 12t: Threaded SVM-wrapper-based feature selection with Dynamic Oscillating Search.

Dynamic Oscillating Search is a d-optimizing procedure that adjusts selected
subset size in the course of search. It is a generalization of the Oscillating
Search idea, which proved to be useful in various feature selection contexts.
Here we demonstrate it in multi-threaded configuration (using \c Sequential_Step_Straight_Threaded
instead of \c Sequential_Step_Straight evaluator object). In this example
due to the use of very complex feature selection criterion (SVM Wrapper)
the speed gain due to multithreading is substantial. In this example features are selected 
on 40-dimensional \e waveform data with 3-fold cross-validated SVM wrapper 
as criterion on the first 50% of data samples. The final classification performance 
on the selected subspace is eventually validated on the second 50% of data.

\note The maximum permitted number of threads to run at once is 
to be user-specified with respect to hardware capabilities.
*/
int main()
{
	try{
	const unsigned int max_threads=2;
	typedef double RETURNTYPE; 	typedef double DATATYPE;  typedef double REALTYPE;
	typedef unsigned int IDXTYPE;  typedef unsigned int DIMTYPE;  typedef short BINTYPE;
	typedef FST::Subset<BINTYPE, DIMTYPE> SUBSET;
	typedef FST::Data_Intervaller<std::vector<FST::Data_Interval<IDXTYPE> >,IDXTYPE> INTERVALLER;
	typedef boost::shared_ptr<FST::Data_Splitter<INTERVALLER,IDXTYPE> > PSPLITTER;
	typedef FST::Data_Splitter_CV<INTERVALLER,IDXTYPE> SPLITTERCV;
	typedef FST::Data_Splitter_5050<INTERVALLER,IDXTYPE> SPLITTER5050;
	typedef FST::Data_Accessor_Splitting_MemTRN<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR;
	typedef FST::Classifier_LIBSVM<RETURNTYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR> CLASSIFIERSVM;
	typedef FST::Criterion_Wrapper<RETURNTYPE,SUBSET,CLASSIFIERSVM,DATAACCESSOR> WRAPPERSVM;
	typedef FST::Sequential_Step_Straight_Threaded<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERSVM,max_threads> EVALUATOR;

		std::cout << std::endl << "Starting Example 12t: Threaded SVM-wrapper-based feature selection with Dynamic Oscillating Search..." << std::endl;
	// keep second half of data for independent testing of final classification performance
		PSPLITTER dsp_outer(new SPLITTER5050());
	// in the course of search use the first half of data by 3-fold cross-validation in wrapper FS criterion evaluation
		PSPLITTER dsp_inner(new SPLITTERCV(3));
	// do not scale data
		boost::shared_ptr<FST::Data_Scaler<DATATYPE> > dsc(new FST::Data_Scaler_void<DATATYPE>());
	// set-up data access
		boost::shared_ptr<std::vector<PSPLITTER> > splitters(new std::vector<PSPLITTER>); 
		splitters->push_back(dsp_outer); splitters->push_back(dsp_inner);
		boost::shared_ptr<DATAACCESSOR> da(new DATAACCESSOR("data/waveform_40.trn",splitters,dsc));
		da->initialize();
	// initiate access to split data parts
		da->setSplittingDepth(0); if(!da->getFirstSplit()) throw FST::fst_error("50/50 data split failed.");
		da->setSplittingDepth(1); if(!da->getFirstSplit()) throw FST::fst_error("3-fold cross-validation failure.");
	// initiate the storage for subset to-be-selected
		boost::shared_ptr<SUBSET> sub(new SUBSET(da->getNoOfFeatures()));
	// set-up SVM (interface to external library LibSVM)
		boost::shared_ptr<CLASSIFIERSVM> csvm(new CLASSIFIERSVM);
		csvm->set_kernel_type(RBF); // (option: LINEAR, POLY, SIGMOID)
		csvm->initialize(da);
	// wrap the SVM classifier to enable its usage as FS criterion (criterion value will be estimated by 3-fold cross-val.)
		boost::shared_ptr<WRAPPERSVM> wsvm(new WRAPPERSVM);
		wsvm->initialize(csvm,da);
	// set-up the threaded sequential search step object (option: hybrid, ensemble, etc.)
		boost::shared_ptr<EVALUATOR> eval(new EVALUATOR);
	// set-up Dynamic Oscillating Search procedure
		FST::Search_DOS<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERSVM,EVALUATOR> srch(eval);
		srch.set_delta(3);
	// first optimize SVM parameters using 3-fold cross-validation on training data on the full set of features
		sub->select_all();
		csvm->optimize_parameters(da,sub);
	// run the search
		std::cout << "Feature selection setup:" << std::endl << *da << std::endl << srch << std::endl << *wsvm << std::endl << std::endl;
		sub->deselect_all(); // let DOS start from an empty set (any starting subset is permissible)
		RETURNTYPE critval_train, critval_test;
		if(!srch.search(0,critval_train,sub,wsvm,std::cout)) throw FST::fst_error("Search not finished.");
	// (optionally) the following line is included here just for illustration because srch.search() reports results in itself
		std::cout << std::endl << "Search result: " << std::endl << *sub << std::endl << "Criterion value=" << critval_train << std::endl << std::endl;
	// (optionally) validate result by estimating SVM accuracy on selected feature sub-space on independent test data
		da->setSplittingDepth(0);
		csvm->train(da,sub);
		csvm->test(critval_test,da);
		std::cout << "Validated SVM accuracy=" << critval_test << std::endl << std::endl;
	}
	catch(FST::fst_error &e) {std::cerr<<"FST ERROR: "<< e.what() << ", code=" << e.code() << std::endl;}
	catch(std::exception &e) {std::cerr<<"non-FST ERROR: "<< e.what() << std::endl;}
	return 0;
}
Exemplo n.º 24
0
/*! \section example21 Example 21: Generalized sequential feature subset search.

All sequential search algorithms (SFS, SFFS, OS, DOS, SFRS) can be extended to operate in "generalized" setting
(term coined in Devijver, Kittler book). In each step of a generalized sequential
search algorithm not only one best feature is added to current subset nor one worst
feature is removed from current subset; instead, g-tuples of features are considered.
Searching for such group of g features that improves the current subset the most when added
(or such that degrades the current subset the least when removed) is more computationally
complex but increases the chance of finding the optimum or a result closer to optimum 
(nevertheless, improvement is not guaranteed and in some cases the result can actually degrade). 
The value g is to be set by user; the higher the value g, the slower the search 
(time complexity increases exponentially with increasing g). Note that setting g equal 
to the number of all features would effectively emulate the operation of exhaustive search.
In this example features are selected using the generalized (G)SFFS algorithm (G=2) and 3-NN wrapper 
classification accuracy as FS criterion. Classification accuracy (i.e., FS wrapper criterion value) is 
estimated on the first 50% of data samples by means of 3-fold cross-validation. The final classification 
performance on the selected subspace is eventually validated on the second 50% of data. (G)SFFS is called
here in d-optimizing setting, invoked by parameter 0 in search(0,...), which is otherwise
used to specify the required subset size.

\note Note that in this context the term generalization does \e not relate to classification performance 
on independent data.

*/
int main()
{
	try{
	typedef double RETURNTYPE; 	typedef double DATATYPE;  typedef double REALTYPE;
	typedef unsigned int IDXTYPE;  typedef unsigned int DIMTYPE;  typedef short BINTYPE;
	typedef FST::Subset<BINTYPE, DIMTYPE> SUBSET;
	typedef FST::Data_Intervaller<std::vector<FST::Data_Interval<IDXTYPE> >,IDXTYPE> INTERVALLER;
	typedef boost::shared_ptr<FST::Data_Splitter<INTERVALLER,IDXTYPE> > PSPLITTER;
	typedef FST::Data_Splitter_CV<INTERVALLER,IDXTYPE> SPLITTERCV;
	typedef FST::Data_Splitter_5050<INTERVALLER,IDXTYPE> SPLITTER5050;
	typedef FST::Data_Accessor_Splitting_MemTRN<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for TRN data format
	//typedef FST::Data_Accessor_Splitting_MemARFF<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for ARFF data format
	typedef FST::Distance_Euclid<DATATYPE,DIMTYPE,SUBSET> DISTANCE;
	typedef FST::Classifier_kNN<RETURNTYPE,DATATYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR,DISTANCE> CLASSIFIERKNN;
	typedef FST::Criterion_Wrapper<RETURNTYPE,SUBSET,CLASSIFIERKNN,DATAACCESSOR> WRAPPERKNN;
	typedef FST::Sequential_Step_Straight<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN> EVALUATOR;

		std::cout << "Starting Example 21: Generalized sequential feature subset search..." << std::endl;
	// keep second half of data for independent testing of final classification performance
		PSPLITTER dsp_outer(new SPLITTER5050());
	// in the course of search use the first half of data by 3-fold cross-validation in wrapper FS criterion evaluation
		PSPLITTER dsp_inner(new SPLITTERCV(3));
	// do not scale data
		boost::shared_ptr<FST::Data_Scaler<DATATYPE> > dsc(new FST::Data_Scaler_void<DATATYPE>());
	// set-up data access
		boost::shared_ptr<std::vector<PSPLITTER> > splitters(new std::vector<PSPLITTER>); 
		splitters->push_back(dsp_outer); splitters->push_back(dsp_inner);
		boost::shared_ptr<DATAACCESSOR> da(new DATAACCESSOR("data/speech_15.trn",splitters,dsc));
		da->initialize();
	// initiate access to split data parts
		da->setSplittingDepth(0); if(!da->getFirstSplit()) throw FST::fst_error("50/50 data split failed.");
		da->setSplittingDepth(1); if(!da->getFirstSplit()) throw FST::fst_error("3-fold cross-validation failure.");
	// initiate the storage for subset to-be-selected
		boost::shared_ptr<SUBSET> sub(new SUBSET(da->getNoOfFeatures()));  sub->deselect_all();
	// set-up 3-Nearest Neighbor classifier based on Euclidean distances
		boost::shared_ptr<CLASSIFIERKNN> cknn(new CLASSIFIERKNN); cknn->set_k(5);
	// wrap the 3-NN classifier to enable its usage as FS criterion (criterion value will be estimated by 3-fold cross-val.)
		boost::shared_ptr<WRAPPERKNN> wknn(new WRAPPERKNN);
		wknn->initialize(cknn,da);
	// set-up the standard sequential search step object (option: hybrid, ensemble, etc.)
		boost::shared_ptr<EVALUATOR> eval(new EVALUATOR);
	// set-up Sequential Forward Floating Selection search procedure
		FST::Search_SFFS<RETURNTYPE,DIMTYPE,SUBSET,WRAPPERKNN,EVALUATOR> srch(eval);
		srch.set_search_direction(FST::FORWARD); // try FST::BACKWARD
	// set the size of feature groups to be evaluated for inclusion/removal in each sequential step (can be applied to SFS, SFFS, OS, DOS, SFRS)
		srch.set_generalization_level(2);
	// run the search
		std::cout << "Feature selection setup:" << std::endl << *da << std::endl << srch << std::endl << *wknn << std::endl << std::endl;
		RETURNTYPE critval_train, critval_test;
		srch.set_output_detail(FST::NORMAL); // set FST::SILENT to disable all text output in the course of search (FST::NORMAL is default)
		if(!srch.search(0,critval_train,sub,wknn,std::cout)) throw FST::fst_error("Search not finished.");
	// (optionally) validate result by estimating kNN accuracy on selected feature sub-space on independent test data
		da->setSplittingDepth(0);
		cknn->train(da,sub);
		cknn->test(critval_test,da);
		std::cout << "Validated "<<cknn->get_k()<<"-NN accuracy=" << critval_test << std::endl << std::endl;
	// (optionally) list the best known solutions for each cardinality as recorded throughout the course of search
		std::cout << "Best recorded solution for subset size:" << std::endl;
		for(DIMTYPE d=1;d<=sub->get_n();d++) 
		if(srch.get_result(d,critval_train,sub)) std::cout << d << ": val="<< critval_train << ", "<<*sub << std::endl;
	}
	catch(FST::fst_error &e) {std::cerr<<"FST ERROR: "<< e.what() << ", code=" << e.code() << std::endl;}
	catch(std::exception &e) {std::cerr<<"non-FST ERROR: "<< e.what() << std::endl;}
	return 0;
}
Exemplo n.º 25
0
/** GHFunc callback for a hash-table foreach: prints every key that srch() matches. */
void fe_srch(gpointer key, gpointer val, gpointer udata) {
    if (srch(key, val, udata) == TRUE) {
        printf("Found key=%s\n", (char *)key);
    }
}
Exemplo n.º 26
0
/*
 * Interactive binary search tree demo.
 *
 * Presents a menu loop offering insert (i), find (f), delete (d),
 * traversal display (s) and quit (q). Relies on helpers defined
 * elsewhere in the project: insert(), srch(), del(), preorder(),
 * inorder(), postorder(), getch() and the NodeT node type.
 *
 * Returns 0 on normal exit.
 */
int main(void)
{
    char choice;
    char ans = 'n';
    int key;
    NodeT *root, *tmp;
    root = NULL;

    do
    {
        printf("\n1.For Insert Press: i");
        printf("\n2.For Find Press:   f");
        printf("\n3.For Delete Press: d");
        printf("\n4.For Show Press:   s");
        printf("\n5.For Quit Press:   q");
        printf("\n\nEnter your choice: ");
        /* Leading space in the format skips the newline left in the
         * input buffer by the previous scanf("%d") calls. */
        scanf(" %c", &choice);

        switch (choice)
        {
        case 'i':
            do
            {
                printf("\nEnter The Element: ");
                scanf("%d", &key);
                root = insert(root, key);
                printf("\nWant To Enter More Elements?(y/n)");
                ans = getch();
            }
            while (ans == 'y');
            break;

        case 'f':
            do
            {
                printf("\nEnter Element to be searched :");
                scanf("%d", &key);
                tmp = srch(root, key);
                /* Print the node address with %p; %d truncates/misreads
                 * pointers on 64-bit platforms (undefined behavior). */
                printf("\nAddress of node %d is %p", key, (void *)tmp);
                printf("\nWant To Find More Elements?(y/n)");
                ans = getch();
            }
            while (ans == 'y');
            break;

        case 'd':
            do
            {
                printf("\nEnter Element to be deleted : ");
                scanf("%d", &key);
                /* NOTE(review): del() presumably returns the new root
                 * (insert() is used that way above); if so, the result
                 * should be assigned back to root -- confirm against
                 * del()'s definition. */
                tmp = del(root, key);
                /* Was printf("...", &key): passed a pointer where %d
                 * expects an int. Print the value itself. */
                printf("The Element %d has been deleted", key);
                printf("\nWant To Delete More Elements?(y/n)");
                ans = getch();
            }
            while (ans == 'y');
            break;

        case 's':
            if (root == NULL)
                printf("Tree Is Not Created");
            else
            {
                printf("\nThe Preorder display : \n");
                preorder(root, 0);
                printf("\nThe Inorder display : \n");
                inorder(root, 0);
                printf("\nThe Postorder display : \n");
                postorder(root, 0);
            }
            break;
        }
    }
    while (choice != 'q');

    return 0;
}
}