static VALUE *eval(void)
{
    VALUE *l, *r;

    l = eval1();
    while (nextarg("|")) {
        G.args++;
        r = eval1();
        if (null(l)) {
            freev(l);
            l = r;
        } else
            freev(r);
    }
    return l;
}
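/* Note (added for context): in expr(1), "a | b" evaluates to a unless a is
 * null or zero, in which case it evaluates to b; the loop above chains this
 * left to right across any number of "|" operands. */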
mat2r Fresnel::jacobian1(real a, real s) {
    // d            d
    // -- C(a,s)    -- C(a,s)
    // da           ds
    //
    // d            d
    // -- S(a,s)    -- S(a,s)
    // da           ds
    const real theta = s * (rl(1) + rl(0.5) * a * s);
    const real c0 = a * s - 1;
    const real c1 = rl(1) / (rl(2) * a * a);

    // TODO: This is disastrous
    vec2r f = eval1(a, s); // = [ C(a, s), S(a, s) ]
    real cas = f.x, sas = f.y;
    real cos_theta = cos(theta), sin_theta = sin(theta);

    real dC_da = c1 * (rl(1) + c0 * cos_theta - sas - a * cas);
    real dS_da = c1 * (        c0 * sin_theta + cas - a * sas);
    real dC_ds = cos_theta - rl(1);
    real dS_ds = sin_theta;

    return mat2r(
        dC_da, dC_ds,
        dS_da, dS_ds
    );
}
/*
 * Allocate a variable for a List and fill it from "*arg".
 * Return OK or FAIL.
 */
int get_list_tv(char_u **arg, typval_T *rettv, int evaluate)
{
    list_T      *l = NULL;
    typval_T    tv;
    listitem_T  *item;

    if (evaluate)
    {
        l = list_alloc();
        if (l == NULL)
            return FAIL;
    }

    *arg = skipwhite(*arg + 1);
    while (**arg != ']' && **arg != NUL)
    {
        if (eval1(arg, &tv, evaluate) == FAIL)  /* recursive! */
            goto failret;
        if (evaluate)
        {
            item = listitem_alloc();
            if (item != NULL)
            {
                item->li_tv = tv;
                item->li_tv.v_lock = 0;
                list_append(l, item);
            }
            else
                clear_tv(&tv);
        }

        if (**arg == ']')
            break;
        if (**arg != ',')
        {
            EMSG2(_("E696: Missing comma in List: %s"), *arg);
            goto failret;
        }
        *arg = skipwhite(*arg + 1);
    }

    if (**arg != ']')
    {
        EMSG2(_("E697: Missing end of List ']': %s"), *arg);
failret:
        if (evaluate)
            list_free(l);
        return FAIL;
    }

    *arg = skipwhite(*arg + 1);
    if (evaluate)
        rettv_list_set(rettv, l);

    return OK;
}
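/* Note (added for context): the loop above accepts "[" expr1 {"," expr1} [","] "]",
 * i.e. a comma-separated List with an optional trailing comma; an empty "[]"
 * skips the loop and falls straight through to the closing-bracket check. */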
/* Parse and evaluate | expressions */
struct val *
eval0(void)
{
    struct val *l, *r;

    l = eval1();
    while (token == OR) {
        nexttoken(0);
        r = eval1();

        if (is_zero_or_null(l)) {
            free_value(l);
            l = r;
        } else {
            free_value(r);
        }
    }

    return l;
}
virtual real evalImp(std::vector<Argument>& arguments) {
    CHECK_EQ(arguments.size(), (size_t)2);
    IVectorPtr& output = arguments[0].ids;
    IVectorPtr& label = arguments[1].ids;
    CHECK(!output->useGpu() && !label->useGpu()) << "Not supported";
    auto sequenceStartPositions =
        arguments[1].sequenceStartPositions->getVector(false);
    CHECK_EQ(output->getSize(), label->getSize());
    CHECK(sequenceStartPositions);
    size_t numSequences = sequenceStartPositions->getSize() - 1;
    const int* starts = sequenceStartPositions->getData();
    for (size_t i = 0; i < numSequences; ++i) {
        eval1(output->getData() + starts[i],
              label->getData() + starts[i],
              starts[i + 1] - starts[i]);
    }
    return 0;
}
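/* Standalone illustration (a sketch, not PaddlePaddle code): how a sequence-start
 * array with numSequences+1 entries partitions one flat buffer into per-sequence
 * slices, which is the same indexing the loop above performs before calling eval1(). */
#include <stdio.h>

int main(void)
{
    int labels[] = {7, 7, 3,  5, 5,  1, 2, 2, 2};  /* three sequences, flattened   */
    int starts[] = {0, 3, 5, 9};                   /* numSequences + 1 offsets      */
    int numSequences = 3;
    int i;

    for (i = 0; i < numSequences; ++i) {
        int len = starts[i + 1] - starts[i];
        /* eval1(output + starts[i], labels + starts[i], len) would see this slice */
        printf("sequence %d: offset %d, length %d\n", i, starts[i], len);
    }
    return 0;
}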
void DatasetTensor::createLogTensors()
{
    qDebug() << "create log tensors...";

    int blockSize = m_data.size();
    m_logData.resize( blockSize );

    std::vector<QVector3D> evec1( blockSize );
    std::vector<float> eval1( blockSize );
    std::vector<QVector3D> evec2( blockSize );
    std::vector<float> eval2( blockSize );
    std::vector<QVector3D> evec3( blockSize );
    std::vector<float> eval3( blockSize );

    FMath::evecs( m_data, evec1, eval1, evec2, eval2, evec3, eval3 );

    // log(M) = U log(D) U^T
    Matrix U( 3, 3 );
    DiagonalMatrix D( 3 );
    Matrix logM( 3, 3 );
    for ( unsigned int i = 0; i < m_logData.size(); ++i )
    {
        U( 1, 1 ) = evec1[i].x();
        U( 2, 1 ) = evec1[i].y();
        U( 3, 1 ) = evec1[i].z();
        U( 1, 2 ) = evec2[i].x();
        U( 2, 2 ) = evec2[i].y();
        U( 3, 2 ) = evec2[i].z();
        U( 1, 3 ) = evec3[i].x();
        U( 2, 3 ) = evec3[i].y();
        U( 3, 3 ) = evec3[i].z();

        D( 1 ) = log( eval1[i] );
        D( 2 ) = log( eval2[i] );
        D( 3 ) = log( eval3[i] );

        logM = U * D * U.t();
        m_logData[i] = logM;
    }
    qDebug() << "create log tensors done!";
}
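// Note (added for context): the loop above computes the matrix logarithm of each
// symmetric positive-definite tensor via its eigendecomposition,
//   M = U * diag(l1, l2, l3) * U^T   =>   log(M) = U * diag(log l1, log l2, log l3) * U^T,
// which is why D is filled with the logarithms of the eigenvalues before forming U*D*U.t().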
/*------------------------------------------------------------------
/
/  eval_user - To add your own eval function, called, for instance,
/              my_eval,
/
/  1. add the function my_eval(int, double*) to the bottom of this file
/
/  2. put the prototype in eval.h
/
/  3. in eval_user add the lines as in the sample below,
/
/  4. recompile (make pga)
/
/  5. change the first line in GAin.dat to my_eval
/
/  6. set the number and range of parameters at the bottom of GAin.dat
/
/  7. run pga
/
/------------------------------------------------------------------*/
double eval_user(int iniche, char *f, int n, double *params)
{
  double fitness;

  if (!strcmp(f, "eval0"))
    fitness = eval0(n, params);
  else if (!strcmp(f, "eval1"))
    fitness = eval1(n, params);
  /* sample new lines ..._____________
  else if (!strcmp(f, "my_eval"))
    fitness = my_eval(n, params);
  ..._________________________________*/
  else {
    fprintf(stdout, "You have specified a bogus function name: %s\n", f);
    exit(0);
  }
  return fitness;
}
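/* Hypothetical illustration of step 1 above (the dispatch line for step 3 is
 * already shown commented out inside eval_user): a user-supplied objective
 * named my_eval.  The sphere function is only a placeholder; whether larger
 * or smaller fitness is better depends on how pga is configured. */
double my_eval(int n, double *params)
{
  double sum = 0.0;
  int i;

  for (i = 0; i < n; i++)
    sum += params[i] * params[i];   /* x1^2 + ... + xn^2 */
  return sum;
}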
int8_t rules1Step() {
    err = ERR_OK;
    int sz = getSize();
    if (sz == 0 || err != ERR_OK)
        return 0;
    uint16_t a = 0;
    for (;;) {
        uint8_t len = get(a);
        // uint8_t tag = get(a + 1);
        uint8_t on = get(a + 2);
        uint8_t reg = get(a + 3);
        if (!len)
            break;
        uint8_t a1 = a + len;
        if (on) {
            a += 4;
            while (a < a1) {
                uint8_t l = get(a);
                int val = eval1(a + 1, l - 1);
                if (err == ERR_OK) {
                    setReg(reg, val);
                    break;
                }
                a += l;
            }
        }
        a = a1;
    }
    return 1;
}
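/* Inferred record layout (an assumption based solely on the parser above, not
 * on any format documentation):
 *
 *   rule record:      [0] len   total record length; 0 terminates the table
 *                     [1] tag   (skipped here)
 *                     [2] on    non-zero -> evaluate the rule
 *                     [3] reg   target register for setReg()
 *                     [4..]     one or more expression blocks
 *
 *   expression block: [0]       block length l, including this byte
 *                     [1..l-1]  bytes passed to eval1(); the first block that
 *                               evaluates without error supplies the value
 *                               written to reg
 */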
Value vexec(Value in)
{
    int i;
    Value x;

    if(in < CACHE && in >= 0 && cache[in] < 0) {
        dcval = cache[in] & DCBIT;
        return cache[in] & VALBIT;
    }
    x = in;
    for(i = 1; i <= ndepends; i++) {
        hp = depends[i];
        hp->val = x & 1;
        x >>= 1;
    }
    x = eval1();
    if(in < CACHE && in >= 0)
        cache[in] = 0x80 | x;
    dcval = (x & 2) ? 1 : 0;
    return (x & 1);
}
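/* Note (inferred from the code above; the exact bit values are an assumption):
 * eval1() appears to pack the result value in bit 0 and a "don't care" flag in
 * bit 1; the cache stores 0x80|x so that the signed test (cache[in] < 0)
 * distinguishes a filled entry from the initial state, with VALBIT and DCBIT
 * presumably selecting those two low bits on readback. */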
/*
 * Allocate a variable for a Dictionary and fill it from "*arg".
 * Return OK or FAIL.  Returns NOTDONE for {expr}.
 */
int get_dict_tv(char_u **arg, typval_T *rettv, int evaluate)
{
    dict_T      *d = NULL;
    typval_T    tvkey;
    typval_T    tv;
    char_u      *key = NULL;
    dictitem_T  *item;
    char_u      *start = skipwhite(*arg + 1);
    char_u      buf[NUMBUFLEN];

    /*
     * First check if it's not a curly-braces thing: {expr}.
     * Must do this without evaluating, otherwise a function may be called
     * twice.  Unfortunately this means we need to call eval1() twice for the
     * first item.
     * But {} is an empty Dictionary.
     */
    if (*start != '}')
    {
        if (eval1(&start, &tv, FALSE) == FAIL)      /* recursive! */
            return FAIL;
        if (*start == '}')
            return NOTDONE;
    }

    if (evaluate)
    {
        d = dict_alloc();
        if (d == NULL)
            return FAIL;
    }
    tvkey.v_type = VAR_UNKNOWN;
    tv.v_type = VAR_UNKNOWN;

    *arg = skipwhite(*arg + 1);
    while (**arg != '}' && **arg != NUL)
    {
        if (eval1(arg, &tvkey, evaluate) == FAIL)   /* recursive! */
            goto failret;
        if (**arg != ':')
        {
            EMSG2(_("E720: Missing colon in Dictionary: %s"), *arg);
            clear_tv(&tvkey);
            goto failret;
        }
        if (evaluate)
        {
            key = get_tv_string_buf_chk(&tvkey, buf);
            if (key == NULL)
            {
                /* "key" is NULL when get_tv_string_buf_chk() gave an errmsg */
                clear_tv(&tvkey);
                goto failret;
            }
        }

        *arg = skipwhite(*arg + 1);
        if (eval1(arg, &tv, evaluate) == FAIL)      /* recursive! */
        {
            if (evaluate)
                clear_tv(&tvkey);
            goto failret;
        }
        if (evaluate)
        {
            item = dict_find(d, key, -1);
            if (item != NULL)
            {
                EMSG2(_("E721: Duplicate key in Dictionary: \"%s\""), key);
                clear_tv(&tvkey);
                clear_tv(&tv);
                goto failret;
            }
            item = dictitem_alloc(key);
            clear_tv(&tvkey);
            if (item != NULL)
            {
                item->di_tv = tv;
                item->di_tv.v_lock = 0;
                if (dict_add(d, item) == FAIL)
                    dictitem_free(item);
            }
        }

        if (**arg == '}')
            break;
        if (**arg != ',')
        {
            EMSG2(_("E722: Missing comma in Dictionary: %s"), *arg);
            goto failret;
        }
        *arg = skipwhite(*arg + 1);
    }

    if (**arg != '}')
    {
        EMSG2(_("E723: Missing end of Dictionary '}': %s"), *arg);
failret:
        if (evaluate)
            dict_free(d);
        return FAIL;
    }

    *arg = skipwhite(*arg + 1);
    if (evaluate)
    {
        rettv->v_type = VAR_DICT;
        rettv->vval.v_dict = d;
        ++d->dv_refcount;
    }

    return OK;
}
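/* Note (added for context): the loop above accepts
 *   "{" expr1 ":" expr1 {"," expr1 ":" expr1} [","] "}";
 * the look-ahead at the top distinguishes a curly-braces name "{expr}"
 * (reported as NOTDONE) from a Dictionary, and "{}" is an empty Dictionary. */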
/*! \section example55 Example 55: Evaluating Similarity of Two Feature Selection Processes.

To study the difference in feature preferences among principally different feature selection methods,
or among differently parametrized instances of the same method, FST3 provides measures capable of
evaluating the level of similarity between two sets of trials (Somol and Novovicova, IEEE TPAMI, 2010).
In analogy to stability evaluation (see \ref example54), for each of the two feature selection
scenarios a series of trials is conducted on various samplings of the same data. In this example ten
feature selection trials are performed per scenario, each on a randomly sampled 95% of the data. In the
first scenario, in each trial the resulting subset is obtained using the DOS procedure, optimizing the
3-Nearest Neighbour accuracy estimated by means of 3-fold cross-validation. In the second scenario, in
each trial the resulting subset is obtained using the SFFS procedure, maximizing the Bhattacharyya
distance based on the normal model. A selection of standard stability measures is evaluated separately
for each of the two scenarios. Eventually the similarity of the two scenarios is evaluated using
analogously founded similarity measures. All measures yield values from [0,1], where values close to 0
denote low stability/similarity and values close to 1 denote high stability/similarity. Note that in
this experiment the inter-measures (IATI, ICW, IANHI) yield markedly lower values than the
corresponding stability measures (ATI, CW, ANHI). This illustrates well that considerably different
results can be expected from differently founded feature selection methods.
*/
int main()
{
    try{
        typedef double RETURNTYPE;
        typedef double DATATYPE;
        typedef double REALTYPE;
        typedef unsigned int IDXTYPE;
        typedef unsigned int DIMTYPE;
        typedef short BINTYPE;
        typedef FST::Subset<BINTYPE, DIMTYPE> SUBSET;
        typedef FST::Data_Intervaller<std::vector<FST::Data_Interval<IDXTYPE> >,IDXTYPE> INTERVALLER;
        typedef boost::shared_ptr<FST::Data_Splitter<INTERVALLER,IDXTYPE> > PSPLITTER;
        typedef FST::Data_Splitter_CV<INTERVALLER,IDXTYPE> SPLITTERCV;
        typedef FST::Data_Splitter_RandomRandom<INTERVALLER,IDXTYPE,BINTYPE> SPLITTERRANDRAND;
        typedef FST::Data_Accessor_Splitting_MemTRN<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for TRN data format
        //typedef FST::Data_Accessor_Splitting_MemARFF<DATATYPE,IDXTYPE,INTERVALLER> DATAACCESSOR; // uncomment for ARFF data format
        typedef FST::Distance_L1<DATATYPE,DIMTYPE,SUBSET> DISTANCEL1;
        typedef FST::Classifier_kNN<RETURNTYPE,DATATYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR,DISTANCEL1> CLASSIFIERKNN;
        typedef FST::Criterion_Wrapper<RETURNTYPE,SUBSET,CLASSIFIERKNN,DATAACCESSOR> WRAPPER;
        typedef FST::Sequential_Step_Straight<RETURNTYPE,DIMTYPE,SUBSET,WRAPPER> EVALUATOR1;
        typedef FST::Criterion_Normal_Bhattacharyya<RETURNTYPE,DATATYPE,REALTYPE,IDXTYPE,DIMTYPE,SUBSET,DATAACCESSOR> BHATTCRIT;
        typedef FST::Sequential_Step_Straight<RETURNTYPE,DIMTYPE,SUBSET,BHATTCRIT> EVALUATOR2;
        typedef FST::Result_Tracker_Stability_Evaluator<RETURNTYPE,IDXTYPE,DIMTYPE,SUBSET> TRACKER;

        std::cout << "Starting Example 55: Evaluating Similarity of Two Feature Selection Processes..." << std::endl;

        // set up ten trials, in each of which 95% of the data is randomly sampled
        PSPLITTER dsp_outer(new SPLITTERRANDRAND(10/*splits=trials*/,95,5));
        // in the course of wrapper-based feature subset search (in one trial) use 3-fold cross-validation
        PSPLITTER dsp_inner(new SPLITTERCV(3));
        // do not scale data
        boost::shared_ptr<FST::Data_Scaler<DATATYPE> > dsc(new FST::Data_Scaler_void<DATATYPE>());
        // set up data access
        boost::shared_ptr<std::vector<PSPLITTER> > splitters(new std::vector<PSPLITTER>);
        splitters->push_back(dsp_outer);
        splitters->push_back(dsp_inner);
        boost::shared_ptr<DATAACCESSOR> da(new DATAACCESSOR("data/speech_15.trn",splitters,dsc));
        da->initialize();
        // initiate access to split data parts
        da->setSplittingDepth(0); if(!da->getFirstSplit()) throw FST::fst_error("RandRand data split failed.");
        da->setSplittingDepth(1); if(!da->getFirstSplit()) throw FST::fst_error("3-fold cross-validation failure.");
        // initiate the storage for the subset to be selected
        boost::shared_ptr<SUBSET> sub(new SUBSET(da->getNoOfFeatures()));
        sub->deselect_all();
        // set up result trackers to collect the results of each trial in both scenarios
        boost::shared_ptr<TRACKER> tracker1(new TRACKER);
        boost::shared_ptr<TRACKER> tracker2(new TRACKER);

        // FEATURE SELECTION SCENARIO A (wrapper)

        // set up the 3-Nearest Neighbour classifier based on L1 distances
        boost::shared_ptr<CLASSIFIERKNN> cknn1(new CLASSIFIERKNN);
        cknn1->set_k(3);
        // wrap the 3-NN classifier to enable its usage as an FS criterion (criterion value will be estimated by 3-fold cross-validation)
        boost::shared_ptr<WRAPPER> wknn1(new WRAPPER);
        wknn1->initialize(cknn1,da);
        // set up the standard sequential search step object (option: hybrid, ensemble, threaded)
        boost::shared_ptr<EVALUATOR1> eval1(new EVALUATOR1);
        // set up the Dynamic Oscillating Search (DOS) procedure
        FST::Search_DOS<RETURNTYPE,DIMTYPE,SUBSET,WRAPPER,EVALUATOR1> srch1(eval1);
        srch1.set_delta(10);
        sub->deselect_all();
        // Technical remark: should a threaded evaluator be used in this case, it would be necessary to move both the evaluator and
        // search procedure set-up inside the trial loop. The reason is technical: the threaded evaluator caches criterion clones,
        // including data accessor state. Therefore no outside change in splitting level nor in the current split can be reflected
        // in criterion evaluation. Renewed evaluator set-up resets the cache and thus ensures correct threaded criterion
        // evaluation behaviour after a split change.

        // run the trials
        std::cout << "Feature selection setup:" << std::endl << *da << std::endl << *wknn1 << std::endl << *tracker1 << std::endl << std::endl;
        RETURNTYPE critval_train;
        da->setSplittingDepth(0);
        unsigned int trial=0;
        bool run=da->getFirstSplit(); if(!run) throw FST::fst_error("RandRand data split failed.");
        while(run)
        {
            trial++;
            std::cout << std::endl<<"TRIAL A"<<trial<< " ---------------------------------------------------------------------"<<std::endl;
            da->setSplittingDepth(1);
            if(!srch1.search(0,critval_train,sub,wknn1,std::cout)) throw FST::fst_error("Search not finished.");
            tracker1->add(critval_train,sub);
            std::cout << std::endl << "(TRIAL A"<<trial<<") Search result: " << std::endl << *sub << "Criterion value=" << critval_train << std::endl;
            da->setSplittingDepth(0);
            run=da->getNextSplit();
        }

        // FEATURE SELECTION SCENARIO B (filter)

        // set up the normal Bhattacharyya distance criterion
        boost::shared_ptr<BHATTCRIT> cb(new BHATTCRIT);
        // set up the standard sequential search step object (option: hybrid, ensemble, threaded)
        boost::shared_ptr<EVALUATOR2> eval2(new EVALUATOR2);
        // set up the Sequential Forward Floating Selection (SFFS) search procedure
        FST::Search_SFFS<RETURNTYPE,DIMTYPE,SUBSET,BHATTCRIT,EVALUATOR2> srch2(eval2);
        srch2.set_search_direction(FST::FORWARD);
        // the target subset size must be set because Bhattacharyya is monotonic with respect to subset size (i.e., it evaluates the full set as the best)
        const DIMTYPE target_size=7;

        // run the trials
        std::cout << "Feature selection setup:" << std::endl << *da << std::endl << srch2 << std::endl << *cb << std::endl << *tracker2 << std::endl << std::endl;
        trial=0;
        da->setSplittingDepth(0);
        run=da->getFirstSplit(); if(!run) throw FST::fst_error("RandRand data split failed.");
        while(run)
        {
            trial++;
            std::cout << std::endl<<"TRIAL B"<<trial<< " ---------------------------------------------------------------------"<<std::endl;
            cb->initialize(da); // (note that cb initialization = normal model parameter estimation on training data, therefore it must be repeated for each split)
            da->setSplittingDepth(1);
            if(!srch2.search(target_size,critval_train,sub,cb,std::cout)) throw FST::fst_error("Search not finished.");
            tracker2->add(critval_train,sub);
            std::cout << std::endl << "(TRIAL B"<<trial<<") Search result: " << std::endl << *sub << "Criterion value=" << critval_train << std::endl;
            da->setSplittingDepth(0);
            run=da->getNextSplit();
        }

        // evaluate stability of each scenario and similarity of the two scenarios using the results collected by the trackers
        std::cout<<std::endl;
        std::cout << "---------------------------------------------------------------------" << std::endl;
        std::cout << "Scenario A resulting criterion values' mean: " << tracker1->value_mean() << ", std. dev.: " << tracker1->value_stddev() << std::endl;
        std::cout << "Scenario A subset sizes' mean: " << tracker1->size_mean() << ", std. dev.: " << tracker1->size_stddev() << std::endl;
        std::cout << std::endl;
        std::cout << "Scenario A stability_ATI()=" << tracker1->stability_ATI() << std::endl;
        std::cout << "Scenario A stability_CW()=" << tracker1->stability_CW() << std::endl;
        std::cout << "Scenario A stability_ANHI("<<da->getNoOfFeatures()<<")=" << tracker1->stability_ANHI(da->getNoOfFeatures()) << std::endl;
        std::cout<<std::endl;
        std::cout << "Scenario B resulting criterion values' mean: " << tracker2->value_mean() << ", std. dev.: " << tracker2->value_stddev() << std::endl;
        std::cout << "Scenario B subset sizes' mean: " << tracker2->size_mean() << ", std. dev.: " << tracker2->size_stddev() << std::endl;
        std::cout << std::endl;
        std::cout << "Scenario B stability_ATI()=" << tracker2->stability_ATI() << std::endl;
        std::cout << "Scenario B stability_CW()=" << tracker2->stability_CW() << std::endl;
        std::cout << "Scenario B stability_ANHI("<<da->getNoOfFeatures()<<")=" << tracker2->stability_ANHI(da->getNoOfFeatures()) << std::endl;
        std::cout<<std::endl;

        std::cout << "Evaluating similarity between scenario A and scenario B:"<< std::endl;
        std::cout << "similarity measure IATI()=" << tracker1->similarity_IATI(*tracker2) << std::endl;
        std::cout << "similarity measure ICW()=" << tracker1->similarity_ICW(*tracker2) << std::endl;
        std::cout << "similarity measure IANHI("<<da->getNoOfFeatures()<<")=" << tracker1->similarity_IANHI(da->getNoOfFeatures(), *tracker2) << std::endl;
    }
    catch(FST::fst_error &e) {std::cerr<<"FST ERROR: "<< e.what() << ", code=" << e.code() << std::endl;}
    catch(std::exception &e) {std::cerr<<"non-FST ERROR: "<< e.what() << std::endl;}
    return 0;
}