Пример #1
0
void NonDLHSSampling::post_run(std::ostream& s)
{
  // Statistics are computed here; they are reported later by this class's
  // print_results() override. Skip the computation when statistics were not
  // requested or when variance-based decomposition is active.
  const bool want_stats = statsFlag && !varBasedDecompFlag;
  if (want_stats)
    compute_statistics(allSamples, allResponses);

  // Hand the remaining post-run processing to the base class.
  Iterator::post_run(s);
}
// Read a lattice from disk, compute its statistics, and release it.
// Returns 0 on success, 1 if the lattice file cannot be read.
int main() {
  const char *lt_name = "data/lattice.2016";
  printf("reading \"%s\"...", lt_name); fflush(stdout);
  lattice *lt = lattice_read(lt_name);
  // Fix: guard against a failed read instead of passing a null lattice
  // to compute_statistics()/lattice_free().
  if (lt == NULL) {
    printf("\tFailed.\n");
    return 1;
  }
  printf("\tOk.\n");

  compute_statistics(lt);

  lattice_free(lt);
  return 0;
}
Пример #3
0
// Build RMSD statistics from iteration models
void US_DmgaMcStats::build_rmsd_stats( int niters,
      QVector< US_Model >& imodels, QVector< double >& rstats )
{
   const int stsiz    = 23;
   QVector< double > rmsds;
   QVector< double > rconcs;

   rstats.fill( 0.0, stsiz );

   // Get statistics for RMSDs
   for ( int jj = 0; jj < niters; jj++ )
   {
      double rmsd      = sqrt( imodels[ jj ].variance );
      rmsds  << rmsd;
      rconcs << 1.0;
   }

   compute_statistics( niters, rmsds, rconcs, rstats );
qDebug() << "Dst:BRs:   iters" << niters << "RMSD min max mean median"
   << rstats[0] << rstats[1] << rstats[2] << rstats[3];
}
Пример #4
0
// Entry point of the Tokenize program: splits the input text into tokens,
// writing text.cod, tokens.txt and enter.pos into the .snt directory, then
// produces token statistics (stats.n) and token listings sorted by frequency
// (tok_by_freq.txt) and by alphabetical order (tok_by_alph.txt).
// Returns 0 on success, 1 on a recoverable error; fatal_error() terminates
// the process and does not return.
int main_Tokenize(int argc,char* const argv[]) {
if (argc==1) {
   usage();
   return 0;
}

char alphabet[FILENAME_MAX]="";
char token_file[FILENAME_MAX]="";

Encoding encoding_output = DEFAULT_ENCODING_OUTPUT;
int bom_output = DEFAULT_BOM_OUTPUT;
int mask_encoding_compatibility_input = DEFAULT_MASK_ENCODING_COMPATIBILITY_INPUT;
int val,index=-1;
int mode=NORMAL;
struct OptVars* vars=new_OptVars();
// Parse command-line options
while (EOF!=(val=getopt_long_TS(argc,argv,optstring_Tokenize,lopts_Tokenize,&index,vars))) {
   switch(val) {
   case 'a': if (vars->optarg[0]=='\0') {
                fatal_error("You must specify a non empty alphabet file name\n");
             }
             strcpy(alphabet,vars->optarg);
             break;
   case 'c': mode=CHAR_BY_CHAR; break;
   case 'w': mode=NORMAL; break;
   case 't': if (vars->optarg[0]=='\0') {
                fatal_error("You must specify a non empty token file name\n");
             }
             strcpy(token_file,vars->optarg);
             break;
   case 'k': if (vars->optarg[0]=='\0') {
                fatal_error("Empty input_encoding argument\n");
             }
             decode_reading_encoding_parameter(&mask_encoding_compatibility_input,vars->optarg);
             break;
   case 'q': if (vars->optarg[0]=='\0') {
                fatal_error("Empty output_encoding argument\n");
             }
             decode_writing_encoding_parameter(&encoding_output,&bom_output,vars->optarg);
             break;
   case 'h': usage(); return 0;
   case ':': if (index==-1) fatal_error("Missing argument for option -%c\n",vars->optopt);
             else fatal_error("Missing argument for option --%s\n",lopts_Tokenize[index].name);
             // Fix: defensive break so this case cannot fall through into
             // the '?' case (fatal_error() is expected to abort, but the
             // cases should be independent regardless).
             break;
   case '?': if (index==-1) fatal_error("Invalid option -%c\n",vars->optopt);
             else fatal_error("Invalid option --%s\n",vars->optarg);
             break;
   }
   index=-1;
}

if (vars->optind!=argc-1) {
   fatal_error("Invalid arguments: rerun with --help\n");
}
U_FILE* text;
U_FILE* out;
U_FILE* output;
U_FILE* enter;
char tokens_txt[FILENAME_MAX];
char text_cod[FILENAME_MAX];
char enter_pos[FILENAME_MAX];
Alphabet* alph=NULL;

// Build the .snt output file names from the input text path
get_snt_path(argv[vars->optind],text_cod);
strcat(text_cod,"text.cod");
get_snt_path(argv[vars->optind],tokens_txt);
strcat(tokens_txt,"tokens.txt");
get_snt_path(argv[vars->optind],enter_pos);
strcat(enter_pos,"enter.pos");
text=u_fopen_existing_versatile_encoding(mask_encoding_compatibility_input,argv[vars->optind],U_READ);
if (text==NULL) {
   fatal_error("Cannot open text file %s\n",argv[vars->optind]);
}
if (alphabet[0]!='\0') {
   alph=load_alphabet(alphabet);
   if (alph==NULL) {
      error("Cannot load alphabet file %s\n",alphabet);
      u_fclose(text);
      free_OptVars(vars); // fix: vars was leaked on this error path
      return 1;
   }
}
out=u_fopen(BINARY,text_cod,U_WRITE);
if (out==NULL) {
   error("Cannot create file %s\n",text_cod);
   u_fclose(text);
   if (alph!=NULL) {
      free_alphabet(alph);
   }
   free_OptVars(vars); // fix: vars was leaked on this error path
   return 1;
}
enter=u_fopen(BINARY,enter_pos,U_WRITE);
if (enter==NULL) {
   error("Cannot create file %s\n",enter_pos);
   u_fclose(text);
   u_fclose(out);
   if (alph!=NULL) {
      free_alphabet(alph);
   }
   free_OptVars(vars); // fix: vars was leaked on this error path
   return 1;
}


vector_ptr* tokens=new_vector_ptr(4096);
vector_int* n_occur=new_vector_int(4096);
vector_int* n_enter_pos=new_vector_int(4096);
struct hash_table* hashtable=new_hash_table((HASH_FUNCTION)hash_unichar,(EQUAL_FUNCTION)u_equal,
                                            (FREE_FUNCTION)free,NULL,(KEYCOPY_FUNCTION)keycopy);
// Pre-load an existing token list, if one was supplied with -t
if (token_file[0]!='\0') {
   load_token_file(token_file,mask_encoding_compatibility_input,tokens,hashtable,n_occur);
}

output=u_fopen_creating_versatile_encoding(encoding_output,bom_output,tokens_txt,U_WRITE);
if (output==NULL) {
   error("Cannot create file %s\n",tokens_txt);
   u_fclose(text);
   u_fclose(out);
   u_fclose(enter);
   if (alph!=NULL) {
      free_alphabet(alph);
   }

   free_hash_table(hashtable);
   free_vector_ptr(tokens,free);
   free_vector_int(n_occur);
   free_vector_int(n_enter_pos);

   free_OptVars(vars); // fix: vars was leaked on this error path
   return 1;
}
// Placeholder token count; overwritten by write_number_of_tokens() below
u_fprintf(output,"0000000000\n");

int SENTENCES=0;
int TOKENS_TOTAL=0;
int WORDS_TOTAL=0;
int DIGITS_TOTAL=0;
u_printf("Tokenizing text...\n");
if (mode==NORMAL) {
   normal_tokenization(text,out,output,alph,tokens,hashtable,n_occur,n_enter_pos,
		   &SENTENCES,&TOKENS_TOTAL,&WORDS_TOTAL,&DIGITS_TOTAL);
}
else {
   char_by_char_tokenization(text,out,output,alph,tokens,hashtable,n_occur,n_enter_pos,
		   &SENTENCES,&TOKENS_TOTAL,&WORDS_TOTAL,&DIGITS_TOTAL);
}
u_printf("\nDone.\n");
save_new_line_positions(enter,n_enter_pos);
u_fclose(enter);
u_fclose(text);
u_fclose(out);
u_fclose(output);
write_number_of_tokens(tokens_txt,encoding_output,bom_output,tokens->nbelems);
// we compute some statistics
get_snt_path(argv[vars->optind],tokens_txt);
strcat(tokens_txt,"stats.n");
output=u_fopen_creating_versatile_encoding(encoding_output,bom_output,tokens_txt,U_WRITE);
if (output==NULL) {
   error("Cannot write %s\n",tokens_txt);
}
else {
   compute_statistics(output,tokens,alph,SENTENCES,TOKENS_TOTAL,WORDS_TOTAL,DIGITS_TOTAL);
   u_fclose(output);
}
// we save the tokens by frequence
get_snt_path(argv[vars->optind],tokens_txt);
strcat(tokens_txt,"tok_by_freq.txt");
output=u_fopen_creating_versatile_encoding(encoding_output,bom_output,tokens_txt,U_WRITE);
if (output==NULL) {
   error("Cannot write %s\n",tokens_txt);
}
else {
   sort_and_save_by_frequence(output,tokens,n_occur);
   u_fclose(output);
}
// we save the tokens by alphabetical order
get_snt_path(argv[vars->optind],tokens_txt);
strcat(tokens_txt,"tok_by_alph.txt");
output=u_fopen_creating_versatile_encoding(encoding_output,bom_output,tokens_txt,U_WRITE);
if (output==NULL) {
   error("Cannot write %s\n",tokens_txt);
}
else {
   sort_and_save_by_alph_order(output,tokens,n_occur);
   u_fclose(output);
}
free_hash_table(hashtable);
free_vector_ptr(tokens,free);
free_vector_int(n_occur);
free_vector_int(n_enter_pos);
if (alph!=NULL) {
   free_alphabet(alph);
}
free_OptVars(vars);
return 0;
}
Пример #5
0
// Build Model attribute statistics from iteration models
//
// For each component of the model, statistics vectors are computed over the
// niters iterations for six attributes (concentration, vbar20, mw, s, D,
// f/f0); for each association, for two attributes (k_d, k_off), weighted by
// reactant concentration. One statistics vector per attribute is stored in
// astats, in that order; the return value is the total attribute count
// (ncomp*6 + nasso*2). Statistics layout comes from compute_statistics();
// per the qDebug labels below, index 2 of each vector appears to be the
// mean — TODO confirm against compute_statistics().
// Assumes all models in imodels share the component/association layout of
// imodels[0], and that imodels.size() >= niters — not checked here.
int US_DmgaMcStats::build_model_stats( int niters,
      QVector< US_Model >& imodels, QVector< QVector< double > >& astats )
{
   QVector< double > concs;
   QVector< double > vbars;
   QVector< double > mwts;
   QVector< double > scos;
   QVector< double > dcos;
   QVector< double > ff0s;
   QVector< double > kds;
   QVector< double > koffs;
   const int ncatt  = 6;    // attributes gathered per component
   const int naatt  = 2;    // attributes gathered per association
   int ncomp        = imodels[ 0 ].components.size();
   int nasso        = imodels[ 0 ].associations.size();
   int ntatts       = ncomp * ncatt + nasso * naatt;
   astats.resize( ntatts );

   int kt           = 0;    // running index into astats across both loops
qDebug() << "DmS:BMs:  ncomp" << ncomp << "nasso" << nasso
 << "ntatts" << ntatts << "niters" << niters;

   // Get statistics for each component
   for ( int ii = 0; ii < ncomp; ii++ )
   {
      concs.fill( 0.0, niters );
      vbars.fill( 0.0, niters );
      mwts .fill( 0.0, niters );
      scos .fill( 0.0, niters );
      dcos .fill( 0.0, niters );
      ff0s .fill( 0.0, niters );
      // Index of the previous component (clamped at 0): used below as a
      // fallback concentration source when this component's is non-positive
      int i0         = qMax( 0, ii - 1 );

      for ( int jj = 0; jj < niters; jj++ )
      {
         US_Model::SimulationComponent* sc = &imodels[ jj ].components[ ii ];
         US_Model::SimulationComponent* s0 = &imodels[ jj ].components[ i0 ];
         concs[ jj ]    = sc->signal_concentration > 0.0 ?
                          sc->signal_concentration :
                          s0->signal_concentration;
         vbars[ jj ]    = sc->vbar20;
         mwts [ jj ]    = sc->mw;
         scos [ jj ]    = sc->s;
         dcos [ jj ]    = sc->D;
         ff0s [ jj ]    = sc->f_f0;
      }

      // Concentration-weighted statistics for the six component attributes
      compute_statistics( niters, concs, concs, astats[ kt++ ] );
      compute_statistics( niters, vbars, concs, astats[ kt++ ] );
      compute_statistics( niters, mwts , concs, astats[ kt++ ] );
      compute_statistics( niters, scos , concs, astats[ kt++ ] );
      compute_statistics( niters, dcos , concs, astats[ kt++ ] );
      compute_statistics( niters, ff0s , concs, astats[ kt++ ] );
qDebug() << "DmS:BMs:   ii" << ii << "mean c v w s d k"
 << astats[kt-6][2] << astats[kt-5][2] << astats[kt-4][2]
 << astats[kt-3][2] << astats[kt-2][2] << astats[kt-1][2];
   }

   // Get statistics for reactions
   for ( int ii = 0; ii < nasso; ii++ )
   {
      US_Model::Association* as = &imodels[ 0 ].associations[ ii ];
      int nrcs        = as->rcomps.size();
      int rc1         = as->rcomps[ 0 ];
      // rc2 is read unconditionally but only used in the two-reactant
      // branch below (nrcs != 2)
      int rc2         = as->rcomps[ 1 ];
      //int stoi1       = as->stoichs[ 0 ];
      //int stoi2       = as->stoichs[ 1 ];
      //int stoi3       = ( nrcs == 2 ) ? stoi2 : as->stoichs[ 2 ];
      concs.fill( 0.0, niters );
      kds  .fill( 0.0, niters );
      koffs.fill( 0.0, niters );

      if ( nrcs == 2 )
      {  // Single reactant and a product
         for ( int jj = 0; jj < niters; jj++ )
         {
            US_Model::Association* as = &imodels[ jj ].associations[ ii ];
            US_Model::SimulationComponent* sc = &imodels[ jj ].components[rc1];
            concs[ jj ]    = sc->signal_concentration;
            kds  [ jj ]    = as->k_d;
            koffs[ jj ]    = as->k_off;
         }

         // Reactant-concentration-weighted statistics for k_d and k_off
         compute_statistics( niters, kds,   concs, astats[ kt++ ] );
         compute_statistics( niters, koffs, concs, astats[ kt++ ] );
      }

      else
      {  // Two reactants and a product (sum concentrations)
         for ( int jj = 0; jj < niters; jj++ )
         {
            US_Model::Association* as = &imodels[ jj ].associations[ ii ];
            US_Model::SimulationComponent* sc = &imodels[ jj ].components[rc1];
            US_Model::SimulationComponent* c2 = &imodels[ jj ].components[rc2];
            concs[ jj ]    = sc->signal_concentration
                           + c2->signal_concentration;
            kds  [ jj ]    = as->k_d;
            koffs[ jj ]    = as->k_off;
         }

         compute_statistics( niters, kds,   concs, astats[ kt++ ] );
         compute_statistics( niters, koffs, concs, astats[ kt++ ] );
qDebug() << "DmS:BMs:   ii" << ii << "mean kd ko"
 << astats[kt-2][2] << astats[kt-1][2];
      }
   }

qDebug() << "DmS:BMs: RETURN w ntatts" << ntatts;
   return ntatts;
}