unsigned int CAlignmentRefiner::GetBlocksToAlign(unsigned int nBlocks, vector<unsigned int>& blocks, string& msg, bool useExtras)
{
    bool skip = false;
    unsigned int first = 0, last = nBlocks - 1;

    blocks.clear();
    if (nBlocks == 0) return 0;

    CArgs args = GetArgs();
    unsigned int nExtra = (useExtras) ? (unsigned int) args.GetNExtra() : 0;

    // If realignment of all blocks is specified, the default settings are OK.
    // Otherwise, use the range specified in the -f and -l flags.
    // Recall that the command line takes one-based integers; 'blocks' is zero-based.
    if (!args["ab"]) {
        if (args["f"]) {
            first = (unsigned) args["f"].AsInteger() - 1;
        }
        if (args["l"]) {
            last = (unsigned) args["l"].AsInteger() - 1;
        }
        if (first >= nBlocks) {
            first = 0;
        }
        if (last < first || last >= nBlocks) {
            last = nBlocks - 1;
        }
    }

    // Any extra arguments provided refer to block numbers to freeze.
    msg = "\nAligning blocks: ";
    for (unsigned int i = first; i <= last; ++i) {
        if (nExtra > 0) {
            skip = false;
            for (size_t extra = 1; extra <= nExtra; ++extra) {
                if (args[extra].AsInteger() - 1 == (int) i) {
                    skip = true;
                    break;
                }
            }
            if (skip) continue;
        }
        blocks.push_back(i);
        msg.append(NStr::UIntToString(i+1) + " ");
        if ((last-first+1)%15 == 0 && first != 0) msg.append("\n");
    }
    //TERSE_INFO_MESSAGE_CL("message in GetBlocksToAlign:\n" << msg);

    return blocks.size();
}
//------------------------------------------------------------------//
void Console::cmd_set(const String &args)
{
    CArgs cargs;
    tokenize( &cargs, args );

    if( cargs.size() == 1 ){
        String val = get( cargs[0] );
        logc( format(" %s = %s") %cargs[0] %val );
    }
    else if( cargs.size() == 2 ){
        set( cargs[0], cargs[1] );
        logc( format(" %s = %s") %cargs[0] %cargs[1] );
    }
    else{
        logw("ERROR: Too many arguments!");
    }
}
int main( int argc, char* argv[] )
{
/* ------------------ */
/* Parse command line */
/* ------------------ */

    gArgs.SetCmdLine( argc, argv );

/* ------- */
/* Convert */
/* ------- */

    clock_t t0 = StartTiming();

    if( !IDBGetImageDims( gW, gH, gArgs.idb, flog ) )
        exit( 42 );

    gMeta.Init( gArgs.meta );

    if( isAff )
        ConvertA();
    else
        ConvertH();

    StopTiming( flog, "Convert", t0 );

/* ---- */
/* Done */
/* ---- */

    fprintf( flog, "\n" );
    fclose( flog );

    return 0;
}
CRef<CBlastOptionsHandle>
CBlastnAppArgs::x_CreateOptionsHandle(CBlastOptions::EAPILocality locality,
                                      const CArgs& args)
{
    _ASSERT(args.Exist(kTask));
    _ASSERT(args[kTask].HasValue());
    return x_CreateOptionsHandleWithTask(locality, args[kTask].AsString());
}
CRef<CBlastOptionsHandle>
CPsiBlastAppArgs::x_CreateOptionsHandle(CBlastOptions::EAPILocality locality,
                                        const CArgs& args)
{
    if (args.Exist(kArgPHIPatternFile) && args[kArgPHIPatternFile])
        return CRef<CBlastOptionsHandle>(new CPHIBlastProtOptionsHandle(locality));
    else
        return CRef<CBlastOptionsHandle>(new CPSIBlastOptionsHandle(locality));
}
int main( int argc, char* argv[] )
{
    gArgs.SetCmdLine( argc, argv );

    ReadParams();

    ChannelLoop();

    for( int i = 0; i < 4; ++i )
        RasterFree( ffras[i] );

    fprintf( flog, "\n" );
    fclose( flog );

    return 0;
}
int main( int argc, char **argv )
{
    clock_t t0 = StartTiming();

/* ---------- */
/* Parameters */
/* ---------- */

    MPIInit( argc, argv );

    if( !gArgs.SetCmdLine( argc, argv ) ||
        !gArgs.GetRanges() ) {

        MPIExit();
        exit( 42 );
    }

/* ------------ */
/* Initial data */
/* ------------ */

    if( !LayerCat( vL, gArgs.tempdir, gArgs.cachedir,
            gArgs.zolo, gArgs.zohi, false ) ) {

        MPIExit();
        exit( 42 );
    }

    InitTables( gArgs.zilo, gArgs.zihi );

    {
        CLoadPoints *LP = new CLoadPoints;
        LP->Load( gArgs.tempdir, gArgs.cachedir );
        delete LP;
    }

/* ----- */
/* Solve */
/* ----- */

    printf( "\n---- Solve ----\n" );

    SetSolveParams( gArgs.regtype, gArgs.Wr, gArgs.Etol );

    XArray Xevn, Xodd;

    if( !strcmp( gArgs.mode, "A2A" ) ) {

        Xevn.Load( gArgs.prior );

        if( gArgs.untwist )
            UntwistAffines( Xevn );

        Xodd.Resize( 6 );
        Solve( Xevn, Xodd, gArgs.iters );
    }
    else if( !strcmp( gArgs.mode, "A2H" ) ) {

        Xevn.Resize( 8 );

        {   // limit A lifetime
            XArray *A = new XArray;
            A->Load( gArgs.prior );

            if( gArgs.untwist )
                UntwistAffines( *A );

            Solve( *A, Xevn, 1 );
            delete A;
        }

        Xodd.Resize( 8 );
        Solve( Xevn, Xodd, gArgs.iters );
    }
    else if( !strcmp( gArgs.mode, "H2H" ) ) {

        Xevn.Load( gArgs.prior );
        Xodd.Resize( 8 );
        Solve( Xevn, Xodd, gArgs.iters );
    }
    else if( !strcmp( gArgs.mode, "eval" ) ) {

        Xevn.Load( gArgs.prior );

        if( gArgs.untwist )
            UntwistAffines( Xevn );

        gArgs.iters = 0;
    }
    else {  // split

        Xevn.Load( gArgs.prior );
        gArgs.iters = 0;
    }

    const XArray& Xfinal = ((gArgs.iters & 1) ? Xodd : Xevn);

/* ----------- */
/* Postprocess */
/* ----------- */

    if( gArgs.mode[0] == 's' ) {
        Split S( Xfinal, gArgs.splitmin );
        S.Run();
    }
    else {
        Evaluate( Xfinal );

        if( gArgs.mode[0] != 'e' )
            Xfinal.Save();
    }

/* ------- */
/* Cleanup */
/* ------- */

    if( !wkid ) {
        printf( "\n" );
        StopTiming( stdout, "Lsq", t0 );
    }

    MPIExit();
    VMStats( stdout );

    return 0;
}
void setOptions (CArgs &arg)
{
  // File reading options
  if (arg.Search("--file"))
    {
      par::file = arg.parameters[ arg.Index("--file") ];

      if (par::pedfile != "null" or par::mapfile != "null") error ("Cannot specify a map or pedfile while using the --file option");

      par::pedfile = par::file + ".ped";
      par::mapfile = par::file + ".map";

      arg.clinelist.push_back("--file");
      arg.clinevalue.push_back(par::file);
    }

  if (arg.Search("--ped"))
    {
      if (par::file != "null") error ("Cannot specify --file and --ped at the same time");

      par::pedfile = arg.parameters[ arg.Index("--ped") ];

      arg.clinelist.push_back("--ped");
      arg.clinevalue.push_back(par::pedfile);
    }

  if (arg.Search("--map"))
    {
      if (par::file != "null") error ("Cannot specify --file and --map at the same time");

      par::mapfile = arg.parameters[ arg.Index("--map") ];

      arg.clinelist.push_back("--map");
      arg.clinevalue.push_back( par::mapfile );
    }

  if (arg.Search("--tfile"))
    {
      par::tfile = arg.parameters[ arg.Index("--tfile") ];

      if (par::tpedfile != "null" or par::tfamfile != "null") error ("Cannot specify a tped file or tfam file while using the --tfile option");

      par::tpedfile = par::tfile + ".tped";
      par::tfamfile = par::tfile + ".tfam";

      arg.clinelist.push_back("--tfile");
      arg.clinevalue.push_back( par::tfile );
    }

  if (arg.Search("--tped"))
    {
      if (par::tfile != "null") error ("Cannot specify --tfile and --tped at the same time");

      par::tpedfile = arg.parameters[ arg.Index("--tped") ];

      arg.clinelist.push_back("--tped");
      arg.clinevalue.push_back(par::tpedfile);
    }

  if (arg.Search("--tfam"))
    {
      if (par::tfile != "null") error ("Cannot specify --tfile and --tfam at the same time");

      par::tfamfile = arg.parameters[ arg.Index("--tfam") ];

      arg.clinelist.push_back("--tfam");
      arg.clinevalue.push_back( par::tfamfile );
    }

//   if (arg.Search("--bfile"))
//     {
//       par::bfile = arg.parameters[ arg.Index("--bfile") ];
//
//       if (par::bpedfile != "null" or par::bmapfile != "null") error ("Cannot specify a bped or bmap file separately when using --bfile option");
//
//       par::bpedfile = par::bfile + ".bped";
//       par::bmapfile = par::bfile + ".bmap";
//       par::bfamfile = par::bfile + ".bfam";
//
//       arg.clinelist.push_back("--bfile");
//       arg.clinevalue.push_back(par::bfile);
//     }
//
//   if (arg.Search("--bped"))
//     {
//       if (par::bfile != "null") error ("Cannot specify --bfile and --bped at the same time");
//
//       par::bpedfile = arg.parameters[ arg.Index("--bped") ];
//
//       arg.clinelist.push_back("--bped");
//       arg.clinevalue.push_back(par::bpedfile);
//     }
//
//   if (arg.Search("--bmap"))
//     {
//       if (par::bfile != "null") error ("Cannot specify --bfile and --bmap at the same time");
//
//       par::bmapfile = arg.parameters[ arg.Index("--bmap") ];
//
//       arg.clinelist.push_back("--bmap");
//       arg.clinevalue.push_back(par::bmapfile);
//     }
//
//   if (arg.Search("--bfam"))
//     {
//       if (par::bfile != "null") error ("Cannot specify --bfile and --bfam at the same time");
//     }

  // File writing options
  if (arg.Search("--recode"))
    {
      par::recode = true;

      arg.clinelist.push_back("--recode");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--transpose"))
    {
      par::transpose = true;

      arg.clinelist.push_back("--transpose");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--webduo"))
    {
      par::webduo = true;

      arg.clinelist.push_back("--webduo");
      arg.clinevalue.push_back(" ");
    }

//   if (arg.Search("--binary"))
//     {
//       par::binary = true;
//
//       arg.clinelist.push_back("--binary");
//       arg.clinevalue.push_back(" ");
//     }

  if (arg.Search("--genome"))
    {
      par::genome = true;
      par::genomefile = arg.parameters[ arg.Index("--genome") ];

      arg.clinelist.push_back("--genome");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--out"))
    {
      par::outfile = arg.parameters[ arg.Index("--out") ];

      arg.clinelist.push_back("--out");
      arg.clinevalue.push_back(par::outfile);
    }

//   // Analysis options
//   if (arg.Search("--findblocks"))
//     {
//       par::findblocks = true;
//
//       arg.clinelist.push_back("--findblocks");
//       arg.clinevalue.push_back(" ");
//     }
//
//   if (arg.Search("--makeucsc"))
//     {
//       par::makeucsc = true;
//
//       arg.clinelist.push_back("--makeucsc");
//       arg.clinevalue.push_back(" ");
//     }

  if (arg.Search("--map3"))
    {
      par::map3 = true;

      arg.clinelist.push_back("--map3");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--silent"))
    {
      par::verbose = false;

      arg.clinelist.push_back("--silent");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--version"))
    {
      par::version = true;

      arg.clinelist.push_back("--version");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--counts"))
    {
      par::counts = true;

      arg.clinelist.push_back("--counts");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--summary"))
    {
      par::summary = true;

      arg.clinelist.push_back("--summary");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--specified"))
    {
      par::specified = true;

      arg.clinelist.push_back("--specified");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--calculated"))
    {
      par::calculated = true;

      arg.clinelist.push_back("--calculated");
      arg.clinevalue.push_back(" ");
    }

  if (arg.Search("--conflicting"))
    {
      par::conflicting = true;

      arg.clinelist.push_back("--conflicting");
      arg.clinevalue.push_back(" ");
    }

  // Help data
  if (arg.Search("--help") or arg.Search("-h") or arg.parametercount == 1)
    {
      cout << ("\n"
               "snpduo --file <fileroot>    Specify .ped and .map files\n"
               "       --ped <ped file>     Specify .ped file name (requires --map)\n"
               "       --map <map file>     Specify .map file name\n"
               "       --tfile <fileroot>   Specify .tfam and .tped files\n"
               "       --tped <tped file>   Specify .tped file name (requires --tfam)\n"
               "       --tfam <tfam file>   Specify .tfam file name\n"
               "       --genome <filename>  Specify a PLINK .genome file for input\n"
               "                            **Warning: Cannot get expected relationships from this file\n"
               "\n"
               "       --out <fileroot>     Specify the root name for output files\n"
               "\n"
               "       --counts             Prints IBS 0,1,2 and 2* (AB->AB) counts to .count file\n"
               "       --summary            Prints counts + Mean and Standard Deviation of IBS to .summary file\n"
               "       --specified          Prints relationships specified in input to .specified file\n"
               "       --calculated         Prints calculated relationships from Mean and SD to .theoretical file\n"
               "       --conflicting        Prints comparisons where specified relationship is different from calculated\n"
               "                            relationship to .conflicting file\n"
//               "\n"
//               "       --findblocks         Find IBS blocks\n"
//               "       --makeucsc           Turn blocks into UCSC track (.bed)\n"
               "\n"
               "       --map3               Specifies no genetic distance column\n"
               "\n"
               "       --recode             Recodes data into ped/map files when used alone\n"
               "       --transpose          When used with --recode outputs a tped/tfam files\n"
               "       --webduo             When used with --recode outputs file compatible with\n"
               "                            the web version of SNPduo (Custom format)\n"
               "                            **Warning: Uses A/B coding but will NOT be the same\n"
               "                            as the platform's A/B coding scheme. Only for Web SNPduo!\n"
               "\n"
               "       --silent             No messages printed to screen\n"
               "       --version            Print version information and exit\n"
               "\n"
               "       --help / -h          Invokes this help menu\n");

      shutdown();
    }
}
void CAgpValidateApplication::x_ValidateUsingFiles(const CArgs& args, CNcbiOstream* out)
{
  if(m_reader.m_is_chr) {
    if(m_reader.m_explicit_scaf) {
      if(!m_use_xml) {
        cout << "===== Reading Chromosome from scaffold AGP =====" << endl;
        // second header - for details that are printed below the summary and stats
        if(out) *out << "===== Chromosome from scaffold AGP =====" << endl;
      }
    }
    // else: cout << "===== Reading Chromosome from component AGP =====" << endl;
  }
  else if(m_reader.m_explicit_scaf) {
    if(!m_use_xml) {
      cout << "===== Reading Scaffold from component AGP =====" << endl;
      if(out) *out << "===== Scaffold from component AGP =====" << endl; // header for details that are printed below
    }
  }

  if( 0==(m_ValidationType&VT_Acc) && args["out"].HasValue()) {
    CAgpCompSpanSplitter *comp_splitter = new CAgpCompSpanSplitter(&(args["out"].AsOutputFile()));
    m_reader.SetRowOutput(comp_splitter);
  }

  if (args.GetNExtra() == 0) {
    x_ValidateFile(cin);
  }
  else {
    SIZE_TYPE num_fasta_files=0;
    bool allowFasta = !m_reader.m_explicit_scaf;

    for (unsigned int i = 1; i <= args.GetNExtra(); i++) {
      m_CurrentFileName = args['#' + NStr::IntToString(i)].AsString();

      if(m_CurrentFileName=="-chr") {
        if(m_reader.m_is_chr) {
          cerr << "Error -- second -chr is not supported.\n";
          exit(1);
        }
        if(!m_reader.m_explicit_scaf) {
          cerr << "Error -- -chr after a file, but no preceding -scaf. Expecting:\n"
               << " -scaf Scaffold_AGP_file(s) -chr Chromosome_AGP_file(s)\n";
          exit(1);
        }

        m_reader.PrintTotals(cout, m_use_xml);
        m_reader.Reset(true);
        pAgpErr->ResetTotals();

        if(!m_use_xml) {
          cout << "\n===== Reading Chromosome from scaffold AGP =====" << endl;
          if(out) *out << "\n===== Chromosome from scaffold AGP =====" << endl; // header for details that are printed below
        }
        continue;
      }

      //CNcbiIstream& istr = args['#' + NStr::IntToString(i)].AsInputFile();
      CNcbiIfstream istr(m_CurrentFileName.c_str());
      if (!istr) {
        cerr << "Error -- unable to open file : " << m_CurrentFileName << "\n";
        exit (1);
      }

      char ch=0;
      if(allowFasta) {
        istr.get(ch);
        istr.putback(ch);
      }

      if(ch=='>') {
        x_LoadLenFa(istr, m_CurrentFileName);
        num_fasta_files++;
      }
      else {
        if(allowFasta && num_fasta_files) x_ReportFastaSeqCount();
        if( args.GetNExtra()-num_fasta_files>1 ) pAgpErr->StartFile(m_CurrentFileName);
        x_ValidateFile(istr);
        allowFasta=false;
      }
    }

    if(num_fasta_files==args.GetNExtra()) {
      //cerr << "No AGP files."; exit (1);
      if(allowFasta && num_fasta_files) x_ReportFastaSeqCount();
      x_ValidateFile(cin);
    }
  }
}
int main(int argc, char* argv[])
{
    int res = 0;
    CCfg::init();
    //help(argc,argv);
    CRingArgs ringArgs;
    ringArgs.check(argc, argv);
    CTrace::add("new request");
    CTrace::add(argc, argv);
    CEnv env;
    env.init(argc, argv);
    CArgs args;
    args.parse(argc, argv);
    CCollect collect;
    collect.init(argc, argv);
    bool needCollect = args.getNeedCollect();

    // This is a compile operation
    if (needCollect)
    {
        // Get the obj file name
        std::string oldFile = args.getObjFileName();
        std::string newFile;

        // Create the directory where collected information is stored
        res = collect.prepareObjFile(oldFile, newFile);
        cond_check_r(0 == res, "createObjFilePath failed", -4);

        char** newArgs = NULL;
        // Build a new gcc command and run a temporary compile to gather all
        // header and source files that the obj file depends on
        res = args.createNewCmd(newFile, newArgs);
        cond_check_r(0 == res, "getOriCmd failed", -1);
        cond_check_r(NULL != newArgs, "oriArgs is NULL", -2);
        cond_check_r(NULL != newArgs[0], "args[0] is NULL", -3);

        // Execute the new gcc command
        int pid = fork();
        cond_check_r(0 <= pid, "fork failed", -4);
        if (0 == pid)
        {
            execvp(newArgs[0], newArgs);
            printf("child process execvp failed\n");
            exit(-1);
        }
        int status;
        waitpid(pid, &status, 0);
        int childRetCode = WEXITSTATUS(status);
        if (0 != childRetCode)
            return childRetCode;

        // Copy every file the obj file depends on
        res = collect.copyFiles(newFile);
        cond_check_r(0 == res, "collect copy files failed", -4);
        utils::rm(newFile.c_str());

        std::map<std::string, std::string> macros;
        res = args.getMacros(macros);
        cond_check_r(0 == res, "getMacros failed", -5);
        res = collect.addMacros(macros);
        cond_check_r(0 == res, "collect addMacros failed", -6);
    }

    // Run the original compile command
    {
        char** oriArgs = NULL;
        res = args.getOriCmd(oriArgs);
        cond_check_r(0 == res, "getOriCmd failed", -1);
        cond_check_r(NULL != oriArgs, "oriArgs is NULL", -2);
        cond_check_r(NULL != oriArgs[0], "args[0] is NULL", -3);
        execvp(oriArgs[0], oriArgs);
        printf("execvp failed\n");
        exit(-1);
    }

    return 0;
}
void CAlignmentRefiner::EchoSettings(ostream& echoStream, bool echoLOO, bool echoBE)
{
    static string yes = "Yes", no = "No";
    CArgs args = GetArgs();
    unsigned int nExtra = (unsigned int) args.GetNExtra();

    if ((!echoLOO && !echoBE) || (echoLOO && echoBE)) {
        echoStream << "Global Refinement Parameters:" << endl;
        echoStream << "=================================" << endl;
        echoStream << "Number of trials = " << m_nTrials << endl;
        echoStream << "Number of cycles per trial = " << m_nCycles << endl;
        echoStream << "Alignment score deviation threshold = " << m_scoreDeviationThreshold << endl;

        if (nExtra > 0) {
            echoStream << "Extra argument(s) freeze " << ((m_loo.extrasAreRows) ? "Row:\n " : "Block:\n ");
            for (size_t extra = 1; extra <= nExtra; ++extra) {
                echoStream << args[extra].AsInteger() << " ";
            }
            echoStream << endl;
        } else {
            echoStream << "No extra arguments that exclude specific rows/blocks from refinement." << endl;
        }

//        echoStream << "Quiet details mode? " << ((m_quietDetails) ? "ON" : "OFF") << endl;
//        echoStream << "Forced threshold (for MC only) = " << m_forcedThreshold << endl;
        echoStream << "Quiet mode? " << ((m_quietMode) ? "ON" : "OFF") << endl;
        echoStream << endl;
    }

    if (echoLOO) {
        echoStream << "Leave-One-Out parameters:" << endl;
        echoStream << "=================================" << endl;
        echoStream << "LOO on? " << ((m_loo.doLOO) ? yes : no) << endl;
        if (m_loo.doLOO) {
            echoStream << "Row selection order: " << RefinerRowSelectorCodeToStr(m_loo.selectorCode) << endl;
            echoStream << "Number left out between PSSM recomputation = " << m_loo.lno << endl;
            echoStream << "Freeze alignment of rows with structure? " << ((m_loo.fixStructures) ? yes : no) << endl;
            echoStream << "Use full sequence or aligned footprint? " << ((m_loo.fullSequence) ? "Full" : "Aligned") << endl;
            echoStream << "N-terminal extension allowed = " << m_loo.nExt << endl;
            echoStream << "C-terminal extension allowed = " << m_loo.cExt << endl;
            echoStream << "Converged after fraction of rows left out do not change score = " << m_loo.sameScoreThreshold << endl;
            echoStream << "Random number generator seed = " << m_loo.seed << endl;
            echoStream << "LOO loop percentile: longest loop allowed = max initial loop * " << m_loo.percentile << endl;
            echoStream << "LOO extension to longest loop allowed = " << m_loo.extension << endl;
            echoStream << "LOO absolute maximum longest loop (zero == no max) = " << m_loo.cutoff << endl;
        }
        echoStream << endl;
    }

    if (echoBE) {
        string algMethod = "Invalid Method";
        string columnMethod = algMethod;

        switch (m_blockEdit.algMethod) {
        case eSimpleExtendAndShrink:
            algMethod = "Extend and Shrink";
            break;
        case eSimpleExtend:
            algMethod = "Extend Only";
            break;
        case eSimpleShrink:
            algMethod = "Shrink Only";
            break;
        case eGreedyExtend:
            algMethod = "Greedy Extend Only";
            break;
        default:
            break;
        };

        switch (m_blockEdit.columnMethod) {
        case ePercentAtOrOverThreshold:
            columnMethod = "% Rows at or Over Threshold";
            break;
        case eSumOfScores:
            columnMethod = "Sum of Scores";
            break;
        case eMedianScore:
            columnMethod = "Median Score";
            break;
        case ePercentOfWeightOverThreshold:
            columnMethod = "% Score Weight at or Over Threshold";
            break;
        case eCompoundScorer:
            columnMethod = "3.3.3";
            if (GetArgs()["be_score"].AsString() != "3.3.3") {
                columnMethod = "Compound Scoring";
            }
            break;
        default:
            break;
        };

        echoStream << "Block editing parameters:" << endl;
        echoStream << "=================================" << endl;
        echoStream << "block editing on? " << ((m_blockEdit.editBlocks) ? yes : no) << endl;
        if (m_blockEdit.editBlocks) {
            echoStream << "block shrinking on? " << ((m_blockEdit.canShrink) ? yes : no) << endl;
            echoStream << "extend first? " << ((m_blockEdit.extendFirst) ? yes : no) << endl;
            echoStream << endl;
            echoStream << "block editing method = " << algMethod << endl;
            echoStream << "column scoring method = " << columnMethod << endl;
            echoStream << endl;
//            echoStream << "not used: column meth2 = " << m_blockEdit.columnMethod2 << endl << endl;

            if (GetArgs()["be_score"].AsString() == "3.3.3") {
                echoStream << "(used for 3.3.3 scoring only):" << endl;
                echoStream << " median threshold = " << m_blockEdit.median << endl;
                echoStream << " negative score fraction = " << m_blockEdit.negScoreFraction << endl;
                echoStream << " negative row fraction = " << m_blockEdit.negRowsFraction << endl;
            } else {
                echoStream << "minimum block size = " << m_blockEdit.minBlockSize << endl;
                echoStream << "column-scorer threshold = " << m_blockEdit.columnScorerThreshold << endl;
                echoStream << "extension threshold = " << m_blockEdit.extensionThreshold << endl;
                echoStream << "shrinkage threshold = " << m_blockEdit.shrinkageThreshold << endl;
            }
        }
        echoStream << endl;
    }
}
RefinerResultCode CAlignmentRefiner::ExtractLOOArgs(unsigned int nAlignedBlocks, string& msg)
{
    int selectionOrder;
    unsigned int nBlocksMade, nExtra, extra;

    // Get arguments
    CArgs args = GetArgs();
    RefinerResultCode result = eRefinerResultOK;

    msg.erase();

    m_loo.doLOO = (!args["no_LOO"]);
    m_loo.fixStructures = (args["fix_structs"]);
    m_loo.extrasAreRows = (!args["extras_are_blocks"]);

    // "selection_order" is mandatory (unless -no_LOO is present) and constrained to {0, 1, 2}.
    // number of trials is only relevant for a random selection order.
    selectionOrder = (m_loo.doLOO) ? args["selection_order"].AsInteger() : 0;
    switch (selectionOrder) {
    case 0:
        m_loo.selectorCode = eRandomSelectionOrder;
        break;
    case 1:
        m_nTrials = 1;
        m_loo.selectorCode = eWorstScoreFirst;
        break;
    case 2:
        m_nTrials = 1;
        m_loo.selectorCode = eBestScoreFirst;
        break;
    };

    if (m_loo.doLOO) {
        m_loo.fullSequence = (args["fs"]);
        m_loo.nExt = (args["nex"]) ? args["nex"].AsInteger() : args["ex"].AsInteger();
        m_loo.cExt = (args["cex"]) ? args["cex"].AsInteger() : args["ex"].AsInteger();
        m_loo.seed = (args["seed"]) ? args["seed"].AsInteger() : 0;
        m_loo.lno = (unsigned int) args["lno"].AsInteger();
        m_loo.sameScoreThreshold = args["convSameScore"].AsDouble();
        m_loo.percentile = args["p"].AsDouble();
        m_loo.extension = (unsigned) args["x"].AsInteger();
        m_loo.cutoff = (unsigned) args["c"].AsInteger();
    }

    if (m_loo.doLOO) {
        if (m_loo.extrasAreRows) {
            nExtra = (unsigned int) args.GetNExtra();
            for (unsigned int i = 1; i <= nExtra; ++i) {
                extra = (unsigned int) args[i].AsInteger() - 1;
                m_loo.rowsToExclude.push_back(extra);
            }
        }

        // 'false' == don't exclude any blocks using extra cmd line arguments;
        nBlocksMade = GetBlocksToAlign(nAlignedBlocks, m_loo.blocks, msg, !m_loo.extrasAreRows);
        msg = "Freeze " + NStr::UIntToString(nAlignedBlocks - nBlocksMade) + " blocks in ExtractLOOArgs.\n";
    }

    return result;
}
int CAlignmentRefiner::Run(void)
{
    CCdd cdd;
    unsigned int alWidth;
    unsigned int nBlocksFromAU;
    string message;

    // Get arguments
    CArgs args = GetArgs();
    string fname, err;
    string basename = args["o"].AsString() + "_", suffix = ".cn3";

    // Stream to results file, if provided, or cout
    // (NOTE: "x_lg" is just a workaround for bug in SUN WorkShop 5.1 compiler)
    ostream* x_lg = args["details"] ? &args["details"].AsOutputFile() : &cout;
    ostream& detailsStream = *x_lg;
    SetDiagStream(x_lg);  // send all diagnostic messages to the same stream

    // Set to info level here, not in main, to avoid info messages about missing log files.
    SetDiagPostLevel(eDiag_Info);

    // Set up details stream first...
    if (args["details"]) {
        string str;
        detailsStream << args.Print(str) << endl << endl;
        detailsStream << string(72, '=') << endl;
        detailsStream.precision(2);
//        if (args["qd"]) m_quietDetails = true;
    }

    // Get initial alignment (in a Cdd blob) from the input file;
    // convert to an AlignmentUtility object.
    if (!ReadCD(args["i"].AsString(), &cdd)) {
        ERROR_MESSAGE_CL("error reading CD from input file " << args["i"].AsString());
        return eRefinerResultCantReadCD;
    }

    AlignmentUtility* au = new AlignmentUtility(cdd.GetSequences(), cdd.GetSeqannot());
    const BlockMultipleAlignment* bma = (au && au->Okay()) ? au->GetBlockMultipleAlignment() : NULL;
    if (!bma) {
        delete au;
        ERROR_MESSAGE_CL("Found invalid alignment in CD " << args["i"].AsString());
        return eRefinerResultAlignmentUtilityError;
    }

    nBlocksFromAU = bma->NAlignedBlocks();
    alWidth = bma->AlignmentWidth();
    TERSE_INFO_MESSAGE_CL("\nRows in alignment: " << bma->NRows());
    TERSE_INFO_MESSAGE_CL("Alignment width : " << alWidth);
    TERSE_INFO_MESSAGE_CL("Number of Aligned Blocks after IBM: " << nBlocksFromAU << "\n");

    // Some general parameters...
    m_nTrials = (unsigned) args["n"].AsInteger();
    m_nCycles = args["nc"].AsInteger();
    m_scoreDeviationThreshold = args["convScoreChange"].AsDouble();
    m_quietMode = (args["q"]);
//    if (m_quietMode) SetDiagPostLevel(eDiag_Error);

    // Fill out data structure for leave-one-out parameters
    // LOO is performed unless -no_LOO option used
    RefinerResultCode looParamResult = ExtractLOOArgs(nBlocksFromAU, message);
    if (looParamResult != eRefinerResultOK) {
        ERROR_MESSAGE_CL(message);
        return looParamResult;
    }

    // If using a fixed selection order based on row-scores of the
    // initial alignment, no need to do multiple trials.
    if (m_nTrials > 1 && m_loo.selectorCode != eRandomSelectionOrder) {
        m_nTrials = 1;
        WARNING_MESSAGE_CL("For deterministic row-selection order, multiple trials are redundant.\nSetting number of trials to one and continuing.\n");
    }

    //EchoSettings(detailsStream, true, false);

    // Fill out data structure for block editing parameters.
    // By default, edit blocks -- must explicitly skip with the -be_fix option.
    RefinerResultCode beParamResult = ExtractBEArgs(nBlocksFromAU, message);
    if (beParamResult != eRefinerResultOK) {
        ERROR_MESSAGE_CL(message);
        return beParamResult;
    }

    //EchoSettings(detailsStream, false, true);
    EchoSettings(detailsStream, true, true);

    if (!m_blockEdit.editBlocks && !m_loo.doLOO) {
        ERROR_MESSAGE_CL("Nothing will happen as both LOO and block editing have been disabled. Stopping");
        return eRefinerResultInconsistentArgumentCombination;
    }

    // Perform the refinement...
    TRACE_MESSAGE_CL("Entering refiner engine...\n");
    CBMARefinerEngine refinerEngine(m_loo, m_blockEdit, m_nCycles, m_nTrials, true, !m_quietMode, m_scoreDeviationThreshold);
    RefinerResultCode result = refinerEngine.Refine(au, &detailsStream);

    // Output final statistics and refined alignments
    // Get results from all trials; use reverse iterator to get them
    // out of the map in order of highest to lowest score.
    unsigned int n = 0;
    unsigned int trial;
    unsigned int nToWrite = (m_nTrials > 1) ? args["nout"].AsInteger() : 1;
    const RefinedAlignments& optimizedAlignments = refinerEngine.GetAllResults();
    RefinedAlignmentsRevCIt rcit = optimizedAlignments.rbegin(), rend = optimizedAlignments.rend();

    if (rcit != rend) {
        detailsStream << endl << endl << "Original Alignment Score = " << refinerEngine.GetInitialScore() << endl;
        detailsStream << endl << "Best Refined Alignments (in descending score order)\n\n";
    } else {
        detailsStream << endl << "No Refined Alignments found (?)\n\n";
    }

    for (; rcit != rend; ++rcit, ++n) {
        trial = rcit->second.iteration;
        if (rcit->second.au == NULL) {
            detailsStream << "Problem in trial " << trial << " -> no refined alignment available." << endl << endl;
            continue;
        }

        detailsStream << "Alignment " << n << ": Score = " << rcit->first << " (trial " << trial << ")" << endl;
        if (n < nToWrite) {
            err.erase();
            cdd.SetSeqannot() = rcit->second.au->GetSeqAnnots();

            // write output as a CD in a new file
            fname = basename + NStr::UIntToString(n) + "_trial" + NStr::UIntToString(trial) + suffix;
            detailsStream << " (written to file '" << fname << "')";
            if (!WriteASNToFile(fname.c_str(), cdd, args["ob"].HasValue(), &err)) {
                ERROR_MESSAGE_CL("error writing output file " << fname);
            }
        }
        detailsStream << endl;
    }

    // Destructor of RefinerEngine cleans up map of optimized alignments
    // once it goes out of scope.

//    delete au;
//    delete auOriginal;

    if (args["details"]) args["details"].CloseFile();

    return result;
}
int COMSSAMerge::Run()
{
    try {
        CArgs args = GetArgs();

        CRef <COMSSASearch> MySearch(new COMSSASearch);

        ESerialDataFormat InFileType(eSerial_Xml), OutFileType(eSerial_Xml);
        bool obz2(false);  // output bzip2 compressed?
        bool ibz2(false);  // input bzip2 compressed?

        if(args["ox"]) OutFileType = eSerial_Xml;
        else if(args["ob"]) OutFileType = eSerial_AsnBinary;
        else if(args["ot"]) OutFileType = eSerial_AsnText;
        else if(args["obz2"]) {
            OutFileType = eSerial_Xml;
            obz2 = true;
        }
        else ERR_POST(Fatal << "output file type not given");

        if(args["ix"]) InFileType = eSerial_Xml;
        else if(args["ib"]) InFileType = eSerial_AsnBinary;
        else if(args["it"]) InFileType = eSerial_AsnText;
        else if(args["ibz2"]) {
            InFileType = eSerial_Xml;
            ibz2 = true;
        }
        else ERR_POST(Fatal << "input file type not given");

        // loop thru input files
        if ( args["i"].AsString() != "") {
            ifstream is(args["i"].AsString().c_str());
            bool Begin(true);
            if(!is) ERR_POST(Fatal << "unable to open input file list " << args["i"].AsString());

            while(!is.eof()) {
                string iFileName;
                NcbiGetline(is, iFileName, "\x0d\x0a");
                if(iFileName == "" || is.eof()) continue;
                try {
                    CRef <COMSSASearch> InSearch(new COMSSASearch);
                    CSearchHelper::ReadCompleteSearch(iFileName, InFileType, ibz2, *InSearch);
                    // InSearch->ReadCompleteSearch(iFileName, InFileType, ibz2);
                    if(Begin) {
                        Begin = false;
                        MySearch->CopyCMSSearch(InSearch);
                    }
                    else {
                        // add
                        MySearch->AppendSearch(InSearch);
                    }
                } catch(CException& e) {
                    ERR_POST(Fatal << "exception: " << e.what());
                    return 1;
                }
            }
        }
        else if ( args.GetNExtra() ) {
            for (size_t extra = 1; extra <= args.GetNExtra(); extra++) {
                CRef <COMSSASearch> InSearch(new COMSSASearch);
                CSearchHelper::ReadCompleteSearch(args[extra].AsString(), InFileType, ibz2, *InSearch);
                //InSearch->ReadCompleteSearch(args[extra].AsString(), InFileType, ibz2);
                try {
                    if(extra == 1) {
                        // copy
                        MySearch->CopyCMSSearch(InSearch);
                    }
                    else {
                        // add
                        MySearch->AppendSearch(InSearch);
                    }
                } catch(CException& e) {
                    ERR_POST(Fatal << "exception: " << e.what());
                    return 1;
                }
            }
        }

        // write out the new search
        auto_ptr <CNcbiOfstream> raw_out;
        auto_ptr <CCompressionOStream> compress_out;
        auto_ptr <CObjectOStream> txt_out;

        if( obz2 ) {
            raw_out.reset(new CNcbiOfstream(args["o"].AsString().c_str()));
            compress_out.reset( new CCompressionOStream (*raw_out,
                                new CBZip2StreamCompressor(),
                                CCompressionStream::fOwnProcessor));
            txt_out.reset(CObjectOStream::Open(OutFileType, *compress_out));
        }
        else {
            txt_out.reset(CObjectOStream::Open(args["o"].AsString().c_str(), OutFileType));
        }
        // auto_ptr <CObjectOStream> txt_out(
        //     CObjectOStream::Open(args["o"].AsString(), OutFileType));

        if(txt_out.get()) {
            SetUpOutputFile(txt_out.get(), OutFileType);
            if (args["sw"]) {
                txt_out->Write(ObjectInfo(*(*MySearch->SetResponse().begin())));
            }
            else {
                txt_out->Write(ObjectInfo(*MySearch));
            }
            txt_out->Flush();
            txt_out->Close();
        }

    } catch (NCBI_NS_STD::exception& e) {
        ERR_POST(Fatal << "Exception in COMSSAMerge::Run: " << e.what());
    }

    return 0;
}