void
ClassAdLog::AppendLog(LogRecord *log)
{
	if (active_transaction) {
		if (active_transaction->EmptyTransaction()) {
			LogBeginTransaction *l = new LogBeginTransaction;
			active_transaction->AppendLog(l);
		}
		active_transaction->AppendLog(log);
	} else {
		//MD: using file pointer
		if (log_fp!=NULL) {
			if (log->Write(log_fp) < 0) {
				EXCEPT("write to %s failed, errno = %d", logFilename(), errno);
			}
			if( m_nondurable_level == 0 ) {
				//MD: flushing data -- using a file pointer
				if (fflush(log_fp) !=0){
					EXCEPT("flush to %s failed, errno = %d", logFilename(), errno);
				}
				//MD: syncing the data as done before
				if (condor_fsync(fileno(log_fp)) < 0) {
					EXCEPT("fsync of %s failed, errno = %d", logFilename(), errno);
				}
			}
		}
		log->Play((void *)&table);
		delete log;
	}
}
void
ClassAdLog::LogState(FILE *fp)
{
	LogRecord *log=NULL;
	ClassAd *ad=NULL;
	ExprTree *expr=NULL;
	HashKey hashval;
	MyString key;
	const char *attr_name = NULL;

	// This must always be the first entry in the log.
	log = new LogHistoricalSequenceNumber( historical_sequence_number, m_original_log_birthdate );
	if (log->Write(fp) < 0) {
		EXCEPT("write to %s failed, errno = %d", logFilename(), errno);
	}
	delete log;

	table.startIterations();
	while(table.iterate(ad) == 1) {
		table.getCurrentKey(hashval);
		hashval.sprint(key);
		log = new LogNewClassAd(key.Value(), ad->GetMyTypeName(), ad->GetTargetTypeName());
		if (log->Write(fp) < 0) {
			EXCEPT("write to %s failed, errno = %d", logFilename(), errno);
		}
		delete log;

		// Unchain the ad -- we just want to write out this ad's exprs,
		// not all the exprs in the chained ad as well.
		AttrList *chain = dynamic_cast<AttrList*>(ad->GetChainedParentAd());
		ad->Unchain();
		ad->ResetName();
		attr_name = ad->NextNameOriginal();
		while (attr_name) {
			expr = ad->LookupExpr(attr_name);
			// This conditional used to check whether the ExprTree is
			// invisible, but no codepath sets any attributes
			// invisible for this call.
			if (expr) {
				log = new LogSetAttribute(key.Value(), attr_name, ExprTreeToString(expr));
				if (log->Write(fp) < 0) {
					EXCEPT("write to %s failed, errno = %d", logFilename(), errno);
				}
				delete log;
			}
			attr_name = ad->NextNameOriginal();
		}
		// ok, now that we're done writing out this ad, restore the chain
		ad->ChainToAd(chain);
	}

	if (fflush(fp) !=0){
		EXCEPT("fflush of %s failed, errno = %d", logFilename(), errno);
	}
	if (condor_fsync(fileno(fp)) < 0) {
		EXCEPT("fsync of %s failed, errno = %d", logFilename(), errno);
	}
}
void
ClassAdLog::FlushLog()
{
	if (log_fp!=NULL) {
		if (fflush(log_fp) !=0){
			EXCEPT("flush to %s failed, errno = %d", logFilename(), errno);
		}
	}
}
bool
ClassAdLog::SaveHistoricalLogs()
{
	if(!max_historical_logs) return true;

	MyString new_histfile;
	if(!new_histfile.sprintf("%s.%lu",logFilename(),historical_sequence_number)) {
		dprintf(D_ALWAYS,"Aborting save of historical log: out of memory.\n");
		return false;
	}

	dprintf(D_FULLDEBUG,"About to save historical log %s\n",new_histfile.Value());

	if( hardlink_or_copy_file(logFilename(), new_histfile.Value()) < 0) {
		dprintf(D_ALWAYS,"Failed to copy %s to %s.\n",logFilename(),new_histfile.Value());
		return false;
	}

	MyString old_histfile;
	if(!old_histfile.sprintf("%s.%lu",logFilename(),historical_sequence_number - max_historical_logs)) {
		dprintf(D_ALWAYS,"Aborting cleanup of historical logs: out of memory.\n");
		return true; // this is not a fatal error
	}

	if( unlink(old_histfile.Value()) == 0 ) {
		dprintf(D_FULLDEBUG,"Removed historical log %s.\n",old_histfile.Value());
	}
	else {
		// It's ok if the old file simply doesn't exist.
		if( errno != ENOENT ) {
			// Otherwise, it's not a fatal error, but definitely odd that
			// we failed to remove it.
			dprintf(D_ALWAYS,"WARNING: failed to remove '%s': %s\n",old_histfile.Value(),strerror(errno));
		}
		return true; // this is not a fatal error
	}

	return true;
}
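// SaveHistoricalLogs() names rotated copies "<log>.<sequence number>" and
// prunes the copy that falls out of the max_historical_logs window.  A small
// illustration of that naming arithmetic, assuming a hypothetical log name
// "job_queue.log", sequence number 7 and max_historical_logs == 3:
#include <cstdio>

static void show_rotation_names()
{
	const char *log_name = "job_queue.log";     // hypothetical logFilename()
	unsigned long seq = 7, max_hist = 3;        // hypothetical state
	char keep[256], drop[256];
	snprintf(keep, sizeof(keep), "%s.%lu", log_name, seq);             // copy to be saved
	snprintf(drop, sizeof(drop), "%s.%lu", log_name, seq - max_hist);  // copy to be pruned
	printf("save as %s, remove %s\n", keep, drop);   // job_queue.log.7, job_queue.log.4
}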
void
ClassAdLog::ForceLog()
{
	// Force log changes to disk.  This involves first flushing
	// the log from memory buffers, then fsyncing to disk.
	if (log_fp!=NULL) {
		// First flush
		FlushLog();

		// Then sync
		if (condor_fsync(fileno(log_fp)) < 0) {
			EXCEPT("fsync of %s failed, errno = %d", logFilename(), errno);
		}
	}
}
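// ForceLog() above is the flush-then-fsync durability step that AppendLog()
// relies on when m_nondurable_level == 0.  A minimal standalone sketch of the
// same pattern, assuming plain POSIX fsync() in place of condor_fsync() and a
// bool result in place of EXCEPT():
#include <cstdio>      // FILE, fflush, fileno
#include <unistd.h>    // fsync

static bool force_to_disk(FILE *fp)
{
	if (fflush(fp) != 0) {          // push stdio buffers down to the kernel
		return false;               // errno describes the failure
	}
	if (fsync(fileno(fp)) < 0) {    // ask the kernel to commit to stable storage
		return false;
	}
	return true;
}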
// ######################################################################
bool BeoWebServer::initLogFile()
{
	// get the time of day
	time_t rawtime; struct tm * timeinfo;
	time ( &rawtime );
	timeinfo = localtime ( &rawtime );
	char buffer [80];
	strftime (buffer,80, "%Y_%m_%d__%H_%M_%S",timeinfo);
	std::string startTime(buffer);

	itsLogFolderName = std::string(sformat("%s%s", LOG_FOLDER, startTime.c_str()));
	LINFO("logFoldername: %s", itsLogFolderName.c_str());

	// create a log directory
	if (mkdir(itsLogFolderName.c_str(), 0777) == -1)
	{
		LFATAL("Cannot create log folder: %s", itsLogFolderName.c_str());
		return false; // initialization failed (this function returns bool, not an exit code)
	}

	std::string logFilename (sformat("%s/Log_%s.txt", itsLogFolderName.c_str(), startTime.c_str()));
	LINFO("logFilename: %s", logFilename.c_str());

	std::string cTime = std::string("Time of day: ") + startTime;
	LINFO("%s", cTime.c_str());
	cTime += std::string("\n");

	// save in a file by appending to the file
	itsLogFilename = logFilename;
	FILE *rFile = fopen(itsLogFilename.c_str(), "at");
	if (rFile != NULL)
	{
		LDEBUG("saving result to %s", logFilename.c_str());
		fputs(cTime.c_str(), rFile);
		fclose (rFile);
	}
	else LFATAL("can't create file: %s", itsLogFilename.c_str());

	return true;
}
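// The log folder and file names above are derived from a single strftime()
// timestamp.  A self-contained sketch of just that naming step; the prefix
// argument stands in for LOG_FOLDER, whose actual value is assumed here:
#include <ctime>
#include <string>

static std::string timestampedLogName(const std::string &logFolderPrefix)
{
	// Same format string as initLogFile(); yields e.g. "2014_03_05__14_07_09".
	char buffer[80];
	time_t rawtime = time(NULL);
	strftime(buffer, sizeof(buffer), "%Y_%m_%d__%H_%M_%S", localtime(&rawtime));
	// Folder "<prefix><stamp>" with file "Log_<stamp>.txt" inside it.
	return logFolderPrefix + buffer + "/Log_" + buffer + ".txt";
}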
void
ClassAdLog::AppendLog(LogRecord *log)
{
	if (active_transaction) {
		if (active_transaction->EmptyTransaction()) {
			LogBeginTransaction *l = new LogBeginTransaction;
			active_transaction->AppendLog(l);
		}
		active_transaction->AppendLog(log);
	} else {
		//MD: using file pointer
		if (log_fp!=NULL) {
			if (log->Write(log_fp) < 0) {
				EXCEPT("write to %s failed, errno = %d", logFilename(), errno);
			}
			if( m_nondurable_level == 0 ) {
				ForceLog(); // flush and fsync
			}
		}
		log->Play((void *)&table);
		delete log;
	}
}
int main( int argc, char *argv[] ) { // try { time_t programStartTime(time(NULL) ); boost::filesystem::path workingDir( boost::filesystem::current_path() ); // ========== PROGRAM PARAMETERS ========== std::string progName( "buildrandctree" ); std::string configFilename( "/home/raid2/moreno/Code/hClustering/config/"+progName+".cfg" ); // program parameters std::string roiFilename, inputFolder, outputFolder; float memory( 0.5 ), maxNbDist( 1 ); unsigned int nbLevel( 26 ), threads( 0 ); bool keepDiscarded( false ), niftiMode( true ), debug( false ); TC_GROWTYPE growType( TC_GROWOFF ); size_t baseSize( 0 ); // Declare a group of options that will be allowed only on command line boost::program_options::options_description genericOptions( "Generic options" ); genericOptions.add_options() ( "version", "Program version" ) ( "help,h", "Produce extended program help message" ) ( "roi,r", boost::program_options::value< std::string >(&roiFilename), "file with the seed voxels coordinates." ) ( "inputf,I", boost::program_options::value< std::string >(&inputFolder), "input data folder (seed tractograms)." ) ( "outputf,O", boost::program_options::value< std::string >(&outputFolder), "output folder" ) ( "maxnbdist,d", boost::program_options::value< float >(&maxNbDist)->implicit_value(1), "[opt] maximum dissimilarity a seed voxel tract must have to its most similar neighbor not be discarded. (0,1]." ) ( "cnbhood,c", boost::program_options::value< unsigned int >(&nbLevel)->implicit_value(26), "[opt] centroid method neighborhood level. Valid values: 6, 18, 26(default), 32, 96, 124." ) ( "basesize,S", boost::program_options::value< size_t >(&baseSize), "[opt] grow homogeneous base nodes (meta-leaves) of size S. (>=2)." ) ( "basenum,N", boost::program_options::value< size_t >(&baseSize), "[opt] grow N homogeneous base nodes (meta-leaves). (>=10)." ) ; // Declare a group of options that will be allowed both on command line and in config file boost::program_options::options_description configOptions( "Configuration" ); configOptions.add_options() ( "verbose,v", "[opt] verbose output." ) ( "vista", "[opt] use vista file format (default is nifti)." ) ( "cache-mem,m", boost::program_options::value< float >(&memory)->implicit_value(0.5), "[opt] maximum of memory (in GBytes) to use for tractogram cache memory. Default: 0.5." ) ( "keep-disc,k", "[opt] keep discarded voxels data in a section of the tree file." ) ( "debugout", "[opt] write additional detailed outputs meant for debug." ) ( "pthreads,p", boost::program_options::value< unsigned int >(&threads), "[opt] number of processing cores to run the program in. Default: all available." ) ; // Hidden options, will be allowed both on command line and in config file, but will not be shown to the user. 
boost::program_options::options_description hiddenOptions( "Hidden options" ); //hiddenOptions.add_options() ; boost::program_options::options_description cmdlineOptions; cmdlineOptions.add(genericOptions).add(configOptions).add(hiddenOptions); boost::program_options::options_description configFileOptions; configFileOptions.add(configOptions).add(hiddenOptions); boost::program_options::options_description visibleOptions( "Allowed options" ); visibleOptions.add(genericOptions).add(configOptions); boost::program_options::positional_options_description posOpt; //this arguments do not need to specify the option descriptor when typed in //posOpt.add( "roi", -1); boost::program_options::variables_map variableMap; store(boost::program_options::command_line_parser(argc, argv).options(cmdlineOptions).positional(posOpt).run(), variableMap); std::ifstream ifs(configFilename.c_str() ); store(parse_config_file(ifs, configFileOptions), variableMap); notify( variableMap); if ( variableMap.count( "help" ) ) { std::cout << "---------------------------------------------------------------------------" << std::endl; std::cout << std::endl; std::cout << " Project: hClustering" << std::endl; std::cout << std::endl; std::cout << " Whole-Brain Connectivity-Based Hierarchical Parcellation Project" << std::endl; std::cout << " David Moreno-Dominguez" << std::endl; std::cout << " [email protected]" << std::endl; std::cout << " [email protected]" << std::endl; std::cout << " www.cbs.mpg.de/~moreno" << std::endl; std::cout << std::endl; std::cout << " For more reference on the underlying algorithm and research they have been used for refer to:" << std::endl; std::cout << " - Moreno-Dominguez, D., Anwander, A., & Knösche, T. R. (2014)." << std::endl; std::cout << " A hierarchical method for whole-brain connectivity-based parcellation." << std::endl; std::cout << " Human Brain Mapping, 35(10), 5000-5025. doi: http://dx.doi.org/10.1002/hbm.22528" << std::endl; std::cout << " - Moreno-Dominguez, D. (2014)." << std::endl; std::cout << " Whole-brain cortical parcellation: A hierarchical method based on dMRI tractography." << std::endl; std::cout << " PhD Thesis, Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig." << std::endl; std::cout << " ISBN 978-3-941504-45-5" << std::endl; std::cout << std::endl; std::cout << " hClustering is free software: you can redistribute it and/or modify" << std::endl; std::cout << " it under the terms of the GNU Lesser General Public License as published by" << std::endl; std::cout << " the Free Software Foundation, either version 3 of the License, or" << std::endl; std::cout << " (at your option) any later version." << std::endl; std::cout << " http://creativecommons.org/licenses/by-nc/3.0" << std::endl; std::cout << std::endl; std::cout << " hClustering is distributed in the hope that it will be useful," << std::endl; std::cout << " but WITHOUT ANY WARRANTY; without even the implied warranty of" << std::endl; std::cout << " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" << std::endl; std::cout << " GNU Lesser General Public License for more details." 
<< std::endl; std::cout << std::endl; std::cout << "---------------------------------------------------------------------------" << std::endl << std::endl; std::cout << "buildrandctree" << std::endl << std::endl; std::cout << "Build a centroid hierarchical tree from a set of artificially pre-generated set of tractograms yoielding a uniformly random similarity matrix and a seed neighborhood information voxel list." << std::endl << std::endl; std::cout << "* Arguments:" << std::endl << std::endl; std::cout << " --version: Program version." << std::endl << std::endl; std::cout << " -h --help: Produce extended program help message." << std::endl << std::endl; std::cout << " -r --roi: A text file with the seed voxel coordinates and the corresponding tractogram index (if tractogram naming is based on index rather than coordinates)." << std::endl << std::endl; std::cout << " -I --inputf: input data folder (containing the compact tractograms)." << std::endl << std::endl; std::cout << " -O --outputf: Output folder where tree files will be written." << std::endl << std::endl; std::cout << "[-d --maxnbdist]: Maximum dissimilarity a seed voxel tract must have to its most similar neighbor not be discarded." << std::endl; std::cout << " Valid values: (0,1] Use a value of 1 (default) if no discarding is desired." << std::endl << std::endl; std::cout << "[-c --cnbhood]: Use centroid method with C neighborhood level. Valid values: 6, 18, 24(default), 32, 96, 124." << std::endl << std::endl; std::cout << "[-S --basesize]: Merge homogeneous base nodes of size S. (mutually exclusive with -N option). Default: 0 (no homogeneous merging)." << std::endl << std::endl; std::cout << "[-N --basenum]: Grow N homogeneous base nodes. (mutually exclusive with -S option). Default: 0 (no homogeneous merging)." << std::endl << std::endl; std::cout << "[-v --verbose]: Verbose output (recommended)." << std::endl << std::endl; std::cout << "[--vista]: Read/write vista (.v) files [default is nifti (.nii) and compact (.cmpct) files]." << std::endl << std::endl; std::cout << "[-m --cache-mem]: Maximum amount of RAM memory (in GBytes) to use for temporal tractogram cache storing. Valid values [0.1,50]. Default: 0.5." << std::endl << std::endl; std::cout << "[-k --keep-disc]: Keep discarded voxel information in a specialiced section of the tree." << std::endl << std::endl; std::cout << "[--debugout]: Write additional detailed outputs meant to be used for debugging." << std::endl << std::endl; std::cout << "[-p --pthreads]: Number of processing threads to run the program in parallel. Default: use all available processors." << std::endl << std::endl; std::cout << std::endl; std::cout << "* Usage example:" << std::endl << std::endl; std::cout << " buildrandctree -r roi_lh.txt -I tractograms/ -O results/ -c 26 -N 1000 -k -m 2 -v " << std::endl << std::endl; std::cout << std::endl; std::cout << "* Outputs (in output folder defined at option -O):" << std::endl << std::endl; std::cout << " - 'cX_bin_nmt.txt' - (where X is the neighborhood level defined at option -c) non-monotonic binary-branching hierarchical tree without tree processing (if desired use processtree command)." << std::endl; std::cout << " - 'baselist_nmt.txt' - meta-leaves (base nodes defined by the us of option -N or -S) list with IDs corresponding to the non-monotonic tree file." << std::endl; std::cout << " - 'success.txt' - An empty file created when the program has sucessfully exited after completion (to help for automatic re-running scripting after failure)." 
<< std::endl; std::cout << " - 'buildrandtree_log.txt' - A text log file containing the parameter details and in-run and completion information of the program." << std::endl; std::cout << std::endl; std::cout << " [extra outputs when using --debugout option)" << std::endl << std::endl; std::cout << " - 'cX_bin_nmt_debug.txt' - version of the counterpart file without '_debug' suffix with redundant information for debugging purposes." << std::endl; std::cout << std::endl; exit(0); } if ( variableMap.count( "verbose" ) ) { std::cout << "verbose output" << std::endl; verbose=true; } if ( variableMap.count( "pthreads" ) ) { if ( threads == 1 ) { std::cout << "Using a single processor" << std::endl; } else if( threads == 0 || threads >= omp_get_num_procs() ) { threads = omp_get_num_procs(); std::cout << "Using all available processors ( " << threads << " )." << std::endl; } else { std::cout << "Using a maximum of " << threads << " processors " << std::endl; } omp_set_num_threads( threads ); } else { threads = omp_get_num_procs(); omp_set_num_threads( threads ); std::cout << "Using all available processors ( " << threads << " )." << std::endl; } if ( variableMap.count( "vista" ) ) { if( verbose ) { std::cout << "Using vista format" << std::endl; } fileManagerFactory fmf; fmf.setVista(); niftiMode = false; } else { if( verbose ) { std::cout << "Using nifti format" << std::endl; } fileManagerFactory fmf; fmf.setNifti(); niftiMode = true; } if ( variableMap.count( "debugout" ) ) { if( verbose ) { std::cout << "Debug output files activated" << std::endl; } debug = true; } if ( variableMap.count( "version" ) ) { std::cout << progName << ", version 2.0" << std::endl; exit(0); } if ( variableMap.count( "roi" ) ) { if( !boost::filesystem::is_regular_file( boost::filesystem::path( roiFilename ) ) ) { std::cerr << "ERROR: roi file \"" <<roiFilename<< "\" is not a regular file" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } else if( verbose ) { std::cout << "Seed voxels roi file: " << roiFilename << std::endl; } } else { std::cerr << "ERROR: no seed voxels roi file stated" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if( verbose ) { std::cout << "Maximum distance to most similar neighbor: " << maxNbDist << std::endl; } if ( maxNbDist <= 0 || maxNbDist > 1 ) { std::cerr << "ERROR: distance value used is out of bounds please use a value within (0,1]" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } else if ( maxNbDist == 1 && verbose ) { std::cout << "No neighbor distance restrictions will be applied" << std::endl; } else if( verbose ) { std::cout << "Seed voxels with no neighbors with tract dissimilarity lower than " << maxNbDist << " will be discarded as outliers" << std::endl; } if( verbose ) { std::cout << "Centroid neighborhood level: " << nbLevel << std::endl; } if ( ( nbLevel != 6 ) && ( nbLevel != 18 ) && ( nbLevel != 26 ) && ( nbLevel != 32 ) && ( nbLevel != 92 ) && ( nbLevel != 124 ) ) { std::cerr << "ERROR: invalid nbhood level, only (6,18,26,32,92,124) are accepted" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if ( ( variableMap.count( "basesize" ) && variableMap.count( "basenum" ) ) ) { std::cerr << "ERROR: options --basesize (-S) and --basenum (-N) are mutually exclusive" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if ( variableMap.count( "basesize" ) ) { if( baseSize <= 1 ) { std::cerr << "ERROR: base node (meta-leaf) size must be greater than 1" << std::endl; std::cerr << 
visibleOptions << std::endl; exit(-1); } else { if( verbose ) { std::cout << "Initial merging stage up to homogeneous base nodes of size: " << baseSize << std::endl; } growType = TC_GROWSIZE; } } if ( variableMap.count( "basenum" ) ) { if( baseSize < 10 ) { std::cerr << "ERROR: base node (meta-leaf) number must be equal or greater than 10" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } else { if( verbose ) { std::cout << "Initial merging stage up to " << baseSize << " homogeneous base nodes (meta-leaves)" << std::endl; } growType = TC_GROWNUM; } } if( growType == TC_GROWOFF && verbose ) { std::cout << "No homogeneous merging stage" << std::endl; } if ( variableMap.count( "keep-disc" ) ) { if( verbose ) { std::cout << "Discarded voxel coordinates will be saved in an special section fo the tree file" << std::endl; } keepDiscarded = true; } else { if( verbose ) { std::cout << "Discarded voxel coordinates will not be saved" << std::endl; } keepDiscarded = false; } if ( variableMap.count( "inputf" ) ) { if( !boost::filesystem::is_directory( boost::filesystem::path( inputFolder ) ) ) { std::cerr << "ERROR: input seed tractogram folder \"" <<inputFolder<< "\" is not a directory" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } else if( verbose ) { std::cout << "Input seed tractogram folder: " << inputFolder << std::endl; } } else { std::cerr << "ERROR: no input seed tractogram stated" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if ( variableMap.count( "outputf" ) ) { if( !boost::filesystem::is_directory( boost::filesystem::path( outputFolder ) ) ) { std::cerr << "ERROR: output folder \"" <<outputFolder<< "\" is not a directory" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } else if( verbose ) { std::cout << "Output folder: " << outputFolder << std::endl; } } else { std::cerr << "ERROR: no output folder stated" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if ( memory < 0.1 || memory > 50) { std::cerr << "ERROR: cache size must be a positive float between 0.1 and 50 (GB)" << std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } else if( verbose ) { std::cout << "Tractogram cache memory: " << memory << " GBytes" << std::endl; } std::string logFilename(outputFolder+"/"+progName+"_log.txt" ); std::ofstream logFile(logFilename.c_str() ); if(!logFile) { std::cerr << "ERROR: unable to open log file: \"" <<logFilename<< "\"" << std::endl; exit(-1); } logFile << "Start Time:\t" << ctime(&programStartTime) << std::endl; logFile << "Working directory:\t" << workingDir.string() << std::endl; logFile << "Verbose:\t" << verbose << std::endl; logFile << "Processors used:\t" << threads << std::endl; if( niftiMode ) { logFile << "Using nifti file format" << std::endl; } else { logFile << "Using vista file format" << std::endl; } logFile << "Vista mode flag:\t" << verbose << std::endl; logFile << "Roi file:\t" << roiFilename << std::endl; logFile << "Max nb distance:\t" << maxNbDist << std::endl; logFile << "Nbhood restriction level:\t" <<nbLevel<< std::endl; switch(growType) { case TC_GROWOFF: logFile << "Region growing: None" << std::endl; break; case TC_GROWSIZE: logFile << "Region growing: Size: " << baseSize << std::endl; break; case TC_GROWNUM: logFile << "Region growing: Number: " << baseSize << std::endl; break; } logFile << "Input seed tract folder:\t" << inputFolder << std::endl; logFile << "Output folder:\t" << outputFolder << std::endl; logFile << "Memory cache size:\t" << memory << " 
GB" << std::endl; logFile << "Debug outputr:\t" << debug << std::endl; logFile << "-------------" << std::endl; ///////////////////////////////////////////////////////////////// randCnbTreeBuilder builder( roiFilename, verbose ); logFile << "Roi size:\t" << builder.roiSize() << std::endl; builder.log( &logFile ); builder.setInputFolder( inputFolder ); builder.setOutputFolder( outputFolder ); builder.setDebugOutput( debug ); builder.buildRandCentroid( nbLevel, memory, growType, baseSize, keepDiscarded ); ///////////////////////////////////////////////////////////////// // save and print total time time_t programEndTime(time(NULL) ); int totalTime( difftime(programEndTime,programStartTime) ); std::cout << "Program Finished, total time: " << totalTime/3600 << "h " << (totalTime%3600)/60 << "' " << ((totalTime%3600)%60) << "\" " << std::endl; logFile << "-------------" << std::endl; logFile << "Finish Time:\t" << ctime(&programEndTime) << std::endl; logFile << "Elapsed time : " << totalTime/3600 << "h " << (totalTime%3600)/60 << "' " << ((totalTime%3600)%60) << "\"" << std::endl; // create file that indicates process was finished successfully std::string successFilename(outputFolder+"/success.txt" ); std::ofstream successFile(successFilename.c_str() ); if(!successFile) { std::cerr << "ERROR: unable to create success file: \"" <<successFile<< "\"" << std::endl; exit(-1); } successFile << "success"; // } // catch(std::exception& e) // { // std::cout << e.what() << std::endl; // return 1; // } return 0; }
ClassAdLog::ClassAdLog(const char *filename,int max_historical_logs_arg) : table(CLASSAD_LOG_HASHTABLE_SIZE, hashFunction) { log_filename_buf = filename; active_transaction = NULL; m_nondurable_level = 0; this->max_historical_logs = max_historical_logs_arg; historical_sequence_number = 1; m_original_log_birthdate = time(NULL); int log_fd = safe_open_wrapper_follow(logFilename(), O_RDWR | O_CREAT | O_LARGEFILE, 0600); if (log_fd < 0) { EXCEPT("failed to open log %s, errno = %d", logFilename(), errno); } log_fp = fdopen(log_fd, "r+"); if (log_fp == NULL) { EXCEPT("failed to fdopen log %s, errno = %d", logFilename(), errno); } // Read all of the log records LogRecord *log_rec; unsigned long count = 0; bool is_clean = true; // was cleanly closed (until we find out otherwise) bool requires_successful_cleaning = false; long long next_log_entry_pos = 0; long long curr_log_entry_pos = 0; while ((log_rec = ReadLogEntry(log_fp, 1+count, InstantiateLogEntry)) != 0) { curr_log_entry_pos = next_log_entry_pos; next_log_entry_pos = ftell(log_fp); count++; switch (log_rec->get_op_type()) { case CondorLogOp_Error: // this is defensive, ought to be caught in InstantiateLogEntry() EXCEPT("ERROR: transaction record %lu was bad (byte offset %lld)\n", count, curr_log_entry_pos); break; case CondorLogOp_BeginTransaction: // this file contains transactions, so it must not // have been cleanly shut down is_clean = false; if (active_transaction) { dprintf(D_ALWAYS, "Warning: Encountered nested transactions in %s, " "log may be bogus...", filename); } else { active_transaction = new Transaction(); } delete log_rec; break; case CondorLogOp_EndTransaction: if (!active_transaction) { dprintf(D_ALWAYS, "Warning: Encountered unmatched end transaction in %s, " "log may be bogus...", filename); } else { active_transaction->Commit(NULL, (void *)&table); // commit in memory only delete active_transaction; active_transaction = NULL; } delete log_rec; break; case CondorLogOp_LogHistoricalSequenceNumber: if(count != 1) { dprintf(D_ALWAYS, "Warning: Encountered historical sequence number after first log entry (entry number = %ld)\n",count); } historical_sequence_number = ((LogHistoricalSequenceNumber *)log_rec)->get_historical_sequence_number(); m_original_log_birthdate = ((LogHistoricalSequenceNumber *)log_rec)->get_timestamp(); delete log_rec; break; default: if (active_transaction) { active_transaction->AppendLog(log_rec); } else { log_rec->Play((void *)&table); delete log_rec; } } } long long final_log_entry_pos = ftell(log_fp); if( next_log_entry_pos != final_log_entry_pos ) { // The log file has a broken line at the end so we _must_ // _not_ write anything more into this log. // (Alternately, we could try to clear out the broken entry // and continue writing into this file, but since we are about to // rotate the log anyway, we may as well just require the rotation // to be successful. In the case where rotation fails, we will // probably soon fail to write to the log file anyway somewhere else.) dprintf(D_ALWAYS,"Detected unterminated log entry in ClassAd Log %s." " Forcing rotation.\n", logFilename()); requires_successful_cleaning = true; } if (active_transaction) { // abort incomplete transaction delete active_transaction; active_transaction = NULL; if( !requires_successful_cleaning ) { // For similar reasons as with broken log entries above, // we need to force rotation. dprintf(D_ALWAYS,"Detected unterminated transaction in ClassAd Log" "%s. 
Forcing rotation.\n", logFilename()); requires_successful_cleaning = true; } } if(!count) { log_rec = new LogHistoricalSequenceNumber( historical_sequence_number, m_original_log_birthdate ); if (log_rec->Write(log_fp) < 0) { EXCEPT("write to %s failed, errno = %d", logFilename(), errno); } } if( !is_clean || requires_successful_cleaning ) { if( !TruncLog() && requires_successful_cleaning ) { EXCEPT("Failed to rotate ClassAd log %s.\n", logFilename()); } } }
bool ClassAdLog::TruncLog() { MyString tmp_log_filename; int new_log_fd; FILE *new_log_fp; dprintf(D_ALWAYS,"About to rotate ClassAd log %s\n",logFilename()); if(!SaveHistoricalLogs()) { dprintf(D_ALWAYS,"Skipping log rotation, because saving of historical log failed for %s.\n",logFilename()); return false; } tmp_log_filename.sprintf( "%s.tmp", logFilename()); new_log_fd = safe_open_wrapper_follow(tmp_log_filename.Value(), O_RDWR | O_CREAT | O_LARGEFILE, 0600); if (new_log_fd < 0) { dprintf(D_ALWAYS, "failed to rotate log: safe_open_wrapper(%s) returns %d\n", tmp_log_filename.Value(), new_log_fd); return false; } new_log_fp = fdopen(new_log_fd, "r+"); if (new_log_fp == NULL) { dprintf(D_ALWAYS, "failed to rotate log: fdopen(%s) returns NULL\n", tmp_log_filename.Value()); return false; } // Now it is time to move courageously into the future. historical_sequence_number++; LogState(new_log_fp); fclose(log_fp); log_fp = NULL; fclose(new_log_fp); // avoid sharing violation on move if (rotate_file(tmp_log_filename.Value(), logFilename()) < 0) { dprintf(D_ALWAYS, "failed to rotate job queue log!\n"); // Beat a hasty retreat into the past. historical_sequence_number--; int log_fd = safe_open_wrapper_follow(logFilename(), O_RDWR | O_APPEND | O_LARGEFILE, 0600); if (log_fd < 0) { EXCEPT("failed to reopen log %s, errno = %d after failing to rotate log.",logFilename(),errno); } log_fp = fdopen(log_fd, "a+"); if (log_fp == NULL) { EXCEPT("failed to refdopen log %s, errno = %d after failing to rotate log.",logFilename(),errno); } return false; } int log_fd = safe_open_wrapper_follow(logFilename(), O_RDWR | O_APPEND | O_LARGEFILE, 0600); if (log_fd < 0) { EXCEPT( "failed to open log in append mode: " "safe_open_wrapper(%s) returns %d\n", logFilename(), log_fd); } log_fp = fdopen(log_fd, "a+"); if (log_fp == NULL) { close(log_fd); EXCEPT("failed to fdopen log in append mode: " "fdopen(%s) returns %d\n", logFilename(), log_fd); } return true; }
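// TruncLog() above follows a write-temp / close / rotate / reopen-append
// sequence.  A reduced sketch of that flow, assuming plain rename() in place
// of Condor's rotate_file() and a bool result in place of EXCEPT()/dprintf():
#include <cstdio>
#include <string>

static bool rotate_log_sketch(const std::string &log_name, FILE **log_fp_inout,
                              void (*write_state)(FILE *))
{
	std::string tmp_name = log_name + ".tmp";
	FILE *tmp_fp = fopen(tmp_name.c_str(), "w+");
	if (tmp_fp == NULL) return false;
	write_state(tmp_fp);              // dump a compacted snapshot (cf. LogState above)
	fclose(*log_fp_inout);            // close the old log before replacing it
	fclose(tmp_fp);                   // close the temp file to avoid sharing violations on move
	if (rename(tmp_name.c_str(), log_name.c_str()) != 0) return false;
	*log_fp_inout = fopen(log_name.c_str(), "a+");   // reopen the rotated log for appending
	return *log_fp_inout != NULL;
}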
int main( int argc, char *argv[] ) { // try { time_t programStartTime(time(NULL)); boost::filesystem::path workingDir( boost::filesystem::current_path()); // ========== PROGRAM PARAMETERS ========== std::string progName("partitiontree"); std::string configFilename("../../config/"+progName+".cfg"); unsigned int threads(0), levelDepth(3), filterRadius(0); bool verbose(false), niftiMode( true ); // program parameters std::string treeFilename, outputFolder; // Declare a group of options that will be allowed only on command line boost::program_options::options_description genericOptions("Generic options"); genericOptions.add_options() ( "version", "Program version" ) ( "help,h", "Produce extended program help message" ) ( "tree,t", boost::program_options::value< std::string >(&treeFilename), "file with the tree to compute partitions from") ( "outputf,O", boost::program_options::value< std::string >(&outputFolder), "output folder where partition files will be written") ( "search-depth,d", boost::program_options::value< unsigned int >(&levelDepth)->implicit_value(3), "[opt] optimal partition search depth (default = 3)") ( "filter-radius,r", boost::program_options::value< unsigned int >(&filterRadius)->implicit_value(0), "[opt] output partition filter kernel radius (default = 0 | no filtering)") ( "hoz", "[opt] obtain horizontal cut partitions (instead of Spread-Separation ones)") ( "maxgran,m", "[opt] obtain only the maximum granularity partition") ; // Declare a group of options that will be allowed both on command line and in config file boost::program_options::options_description configOptions("Configuration"); configOptions.add_options() ( "verbose,v", "[opt] verbose output." ) ( "vista", "[opt] use vista file format (default is nifti)." ) ( "pthreads,p", boost::program_options::value< unsigned int >(&threads), "[opt] number of processing threads to run the program in parallel, default: all available") ; // Hidden options, will be allowed both on command line and in config file, but will not be shown to the user. 
boost::program_options::options_description hiddenOptions("Hidden options"); //hiddenOptions.add_options() ; boost::program_options::options_description cmdlineOptions; cmdlineOptions.add(genericOptions).add(configOptions).add(hiddenOptions); boost::program_options::options_description configFileOptions; configFileOptions.add(configOptions).add(hiddenOptions); boost::program_options::options_description visibleOptions("Allowed options"); visibleOptions.add(genericOptions).add(configOptions); boost::program_options::positional_options_description posOpt; //this arguments do not need to specify the option descriptor when typed in //posOpt.add("roi-file", -1); boost::program_options::variables_map variableMap; store(boost::program_options::command_line_parser(argc, argv).options(cmdlineOptions).positional(posOpt).run(), variableMap); std::ifstream ifs(configFilename.c_str()); store(parse_config_file(ifs, configFileOptions), variableMap); notify(variableMap); if (variableMap.count("help")) { std::cout << "---------------------------------------------------------------------------" << std::endl; std::cout << std::endl; std::cout << " Project: hClustering" << std::endl; std::cout << std::endl; std::cout << " Whole-Brain Connectivity-Based Hierarchical Parcellation Project" << std::endl; std::cout << " David Moreno-Dominguez" << std::endl; std::cout << " [email protected]" << std::endl; std::cout << " [email protected]" << std::endl; std::cout << " www.cbs.mpg.de/~moreno" << std::endl; std::cout << std::endl; std::cout << " For more reference on the underlying algorithm and research they have been used for refer to:" << std::endl; std::cout << " - Moreno-Dominguez, D., Anwander, A., & Knösche, T. R. (2014)." << std::endl; std::cout << " A hierarchical method for whole-brain connectivity-based parcellation." << std::endl; std::cout << " Human Brain Mapping, 35(10), 5000-5025. doi: http://dx.doi.org/10.1002/hbm.22528" << std::endl; std::cout << " - Moreno-Dominguez, D. (2014)." << std::endl; std::cout << " Whole-brain cortical parcellation: A hierarchical method based on dMRI tractography." << std::endl; std::cout << " PhD Thesis, Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig." << std::endl; std::cout << " ISBN 978-3-941504-45-5" << std::endl; std::cout << std::endl; std::cout << " hClustering is free software: you can redistribute it and/or modify" << std::endl; std::cout << " it under the terms of the GNU Lesser General Public License as published by" << std::endl; std::cout << " the Free Software Foundation, either version 3 of the License, or" << std::endl; std::cout << " (at your option) any later version." << std::endl; std::cout << " http://creativecommons.org/licenses/by-nc/3.0" << std::endl; std::cout << std::endl; std::cout << " hClustering is distributed in the hope that it will be useful," << std::endl; std::cout << " but WITHOUT ANY WARRANTY; without even the implied warranty of" << std::endl; std::cout << " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" << std::endl; std::cout << " GNU Lesser General Public License for more details." << std::endl; std::cout << std::endl; std::cout << "---------------------------------------------------------------------------" << std::endl << std::endl; std::cout << "partitiontree" << std::endl << std::endl; std::cout << "Obtain tree partitions at all granularity levels using the Spread-Separation method (finding the the partition with highest SS index at each granularity)." 
<< std::endl; std::cout << " Optimal SS value for each partition is searched within a defined search-depth hierarchical levels. Final partitions can be filtered with a defined kernel size." << std::endl; std::cout << " to keep local SS maxima within that kernel. For SS index refer to (Moreno-Dominguez, 2014)" << std::endl; std::cout << " For an interactive 3D partition management with more options please use the Hierarchcial Clustering module developed in OpenWalnut (www.openwalnut.org)." << std::endl << std::endl; std::cout << "* Arguments:" << std::endl << std::endl; std::cout << " --version: Program version." << std::endl << std::endl; std::cout << " -h --help: produce extended program help message." << std::endl << std::endl; std::cout << " -t --tree: File with the hierarchical tree to extract partitions from." << std::endl << std::endl; std::cout << " -O --outputf: Output folder where partition files will be written." << std::endl << std::endl; std::cout << "[-d --search-depth]: Search optimal partition for each granularity within d hierarchical levels." << std::endl; std::cout << " A higher value will produce more optimized partition but will increase computing time." << std::endl; std::cout << " Default: 3. Recommendened values: 3 for good quality and fast computation, 4 for enhanced quality." << std::endl << std::endl; std::cout << "[-r --filter-radius]: Filter output partitions to keep only local SS (partition quality) maxima" << std::endl; std::cout << " within a r-sized kernel across the granularity dimension." << std::endl << std::endl; std::cout << "[-h --hoz]: Write horizontal cut partitions instead of SS ones (optimal partition search is still based on SS index)." << std::endl << std::endl; std::cout << "[-m --maxgran]: Compute and write only the maximum granularity (meta-leaves) partition." << std::endl << std::endl; std::cout << "[-v --verbose]: verbose output (recommended)." << std::endl << std::endl; std::cout << "[--vista]: write output tree in vista coordinates (default is nifti)." << std::endl << std::endl; std::cout << "[-p --pthreads]: number of processing threads to run the program in parallel. Default: use all available processors." << std::endl << std::endl; std::cout << std::endl; std::cout << "* Usage example:" << std::endl << std::endl; std::cout << " partitiontree -t tree_lh.txt -O results/ -d 3 -r 50 -v" << std::endl << std::endl; std::cout << std::endl; std::cout << "* Outputs (in output folder defined at option -O):" << std::endl << std::endl; std::cout << " (default outputs)" << std::endl; std::cout << " - 'allSSparts_dX.txt' - (where X is the search depth level defined at parameter -d) Contains a summary of the partition information (cut value and size) for all granularities." << std::endl; std::cout << " - 'TREE_SSparts_dX.txt' - (where TREE is the filename of the input tree defined at parameter -t) contains a copy of the original tree file with the partitions at all granularities included in the relevant fields." << std::endl; std::cout << " - 'partitiontree_log.txt' - A text log file containing the parameter details and in-run and completion information of the program." << std::endl; std::cout << std::endl; std::cout << " (additional if using option -r)" << std::endl; std::cout << " - 'filtSSparts_dX_rY.txt' - (where Y is the filter radius defined at parameter -r) Contains a summary of the resulting filtered partitions." 
<< std::endl; std::cout << " - 'TREE_SSparts_dX_rY.txt' - contains a copy of the original tree file with the resulting filtered partitions included in the relevant fields." << std::endl; std::cout << std::endl; std::cout << " (when using --hoz option, the prefix 'SS' will be replaced by 'Hoz'')" << std::endl; std::cout << std::endl; std::cout << " (alternative outputs when using option --maxgran)" << std::endl; std::cout << " - 'fmaxgranPart.txt' - Contains the size information of the resulting maximal granularity partition for that tree." << std::endl; std::cout << " - 'TREE_maxgranPart.txt' - contains a copy of the original tree file with the resulting max granularity partition included in the relevant fields." << std::endl; std::cout << std::endl; exit(0); } if (variableMap.count("version")) { std::cout << progName <<", version 2.0"<<std::endl; exit(0); } if (variableMap.count("verbose")) { std::cout << "verbose output"<<std::endl; verbose=true; } if (variableMap.count("pthreads")) { if (threads==1) { std::cout <<"Using a single processor"<< std::endl; } else if(threads==0 || threads>=omp_get_num_procs()) { threads = omp_get_num_procs(); std::cout <<"Using all available processors ("<< threads <<")." << std::endl; } else { std::cout <<"Using a maximum of "<< threads <<" processors "<< std::endl; } omp_set_num_threads( threads ); } else { threads = omp_get_num_procs(); omp_set_num_threads( threads ); std::cout <<"Using all available processors ("<< threads <<")." << std::endl; } if ( variableMap.count( "vista" ) ) { if( verbose ) { std::cout << "Using vista format" << std::endl; } fileManagerFactory fmf; fmf.setVista(); niftiMode = false; } else { if( verbose ) { std::cout << "Using nifti format" << std::endl; } fileManagerFactory fmf; fmf.setNifti(); niftiMode = true; } if (variableMap.count("tree")) { if(!boost::filesystem::is_regular_file(boost::filesystem::path(treeFilename))) { std::cerr << "ERROR: tree file \""<<treeFilename<<"\" is not a regular file"<<std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } std::cout << "Roi voxels file: "<< treeFilename << std::endl; } else { std::cerr << "ERROR: no tree file stated"<<std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if (variableMap.count("outputf")) { if(!boost::filesystem::is_directory(boost::filesystem::path(outputFolder))) { std::cerr << "ERROR: output folder \""<<outputFolder<<"\" is not a directory"<<std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } std::cout << "Output folder: "<< outputFolder << std::endl; } else { std::cerr << "ERROR: no output folder stated"<<std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if (variableMap.count("maxgran")) { std::cout<<"Obtaining only max. 
granularity partition..."<<std::endl; WHtree tree(treeFilename); std::cout<<tree.getReport( false )<<std::endl; if( tree.testRootBaseNodes() ) { std::vector<size_t > maxpart( tree.getRootBaseNodes() ); std::vector<std::vector<size_t > > partitionVector( 1, maxpart); std::vector<float > partitionValues(1,0); std::cout<<"maxgranpart size: "<<std::endl<<maxpart.size()<<std::endl; WHtreePartition partitioner(&tree); std::string outPartFilename( outputFolder + "/maxgranPart.txt" ); partitioner.writePartitionSet( outPartFilename, partitionValues,partitionVector); tree.insertPartitions( partitionVector, partitionValues ); std::string outTreeFilename( outputFolder + "/" + tree.getName() + "_maxgranPart" ); outTreeFilename += ( ".txt" ); tree.writeTree( outTreeFilename, niftiMode ); return 0; } else { std::cout<<"ERROR: tree does not have a maximum granularity meta-leaf partition"<<std::endl; return(-1); } } if( levelDepth > 5 ) { std::cout << "Level depth indicated: " << levelDepth << " is too high, setting to a maximum of 5" << std::endl; levelDepth = 5; } std::cout << "Using a search depth of: " << levelDepth << std::endl; if( filterRadius > 1000 ) { std::cout << "filter radius indicated: " << filterRadius << " is too high (max is 1000), setting to 100" << std::endl; filterRadius = 10; } if( filterRadius == 0 ) { std::cout << "using no filtering (radius 0)" << std::endl; } else if( filterRadius < 0 ) { std::cout << "filter radius indicated: " << filterRadius << " must be positive. using no filtering (radius 0)" << std::endl; filterRadius = 0; } else { std::cout << "Using a filter radius of: " << filterRadius << std::endl; } ///////////////////////////////////////////////////////////////// std::string logFilename(outputFolder+"/"+progName+"_log.txt"); std::ofstream logFile(logFilename.c_str()); if(!logFile) { std::cerr << "ERROR: unable to open log file: \""<<logFilename<<"\""<<std::endl; exit(-1); } logFile <<"Start Time:\t"<< ctime(&programStartTime) <<std::endl; logFile <<"Working directory:\t"<< workingDir.string() <<std::endl; logFile <<"Verbose:\t"<< verbose <<std::endl; logFile <<"Tree file:\t"<< treeFilename <<std::endl; logFile <<"Output folder:\t"<< outputFolder <<std::endl; logFile <<"Verbose:\t"<< verbose <<std::endl; if( niftiMode ) { logFile << "Using nifti file format" << std::endl; } else { logFile << "Using vista file format" << std::endl; } WHtree tree(treeFilename); logFile << tree.getReport( false ) <<std::endl; std::cout<<tree.getReport( false )<<std::endl; std::vector< float > partitionValues; std::vector< std::vector< size_t> > partitionVector; WHtreePartition treePartition(&tree); std::string prefix; if (variableMap.count("hoz")) { prefix = "Hoz"; std::cout <<"getting hoz partitions at all levels..." <<std::endl; treePartition.scanHozPartitions( &partitionValues, &partitionVector ); std::cout << partitionValues.size() << " Partitions obtained, writing to file..." <<std::endl; logFile <<"Initial partitions:\t"<< partitionValues.size() <<std::endl; std::string outPartFilename( outputFolder + "/all" + prefix + "parts.txt" ); treePartition.writePartitionSet( outPartFilename, partitionValues, partitionVector); tree.insertPartitions( partitionVector, partitionValues ); std::string outTreeFilename( outputFolder + "/" + tree.getName() + "_" + prefix + "parts_d" + boost::lexical_cast<std::string>(levelDepth) ); outTreeFilename += ( ".txt" ); tree.writeTree( outTreeFilename, niftiMode ); } else { prefix = "SS"; std::cout <<"getting SS partitions at all levels..." 
<<std::endl; treePartition.scanOptimalPartitions( levelDepth, &partitionValues, &partitionVector ); std::cout << partitionValues.size() << " Partitions obtained, writing to file..." <<std::endl; logFile <<"Initial partitions:\t"<< partitionValues.size() <<std::endl; std::string outPartFilename( outputFolder + "/all" + prefix + "parts_d" + boost::lexical_cast<std::string>(levelDepth) + ".txt" ); treePartition.writePartitionSet( outPartFilename, partitionValues, partitionVector); tree.insertPartitions( partitionVector, partitionValues ); std::string outTreeFilename( outputFolder + "/" + tree.getName() + "_" + prefix + "parts_d" + boost::lexical_cast<std::string>(levelDepth) ); outTreeFilename += ( ".txt" ); tree.writeTree( outTreeFilename, niftiMode ); } std::vector < unsigned int > filterRadii; //filterRadii.reserve( 6 ); // filterRadii.push_back( 1 ); // filterRadii.push_back( 2 ); // filterRadii.push_back( 5 ); // filterRadii.push_back( 10 ); // filterRadii.push_back( 15 ); // filterRadii.push_back( 20 ); filterRadii.push_back( filterRadius ); for(size_t i=0; i< filterRadii.size(); ++i) { if( filterRadii[i] <= 0 ) { continue; } std::vector< float > filtPartValues( partitionValues ); std::vector< std::vector< size_t> > filtPartVector( partitionVector ); std::cout << "Filtering with a radius of "<< filterRadii[i] << "..." <<std::endl; treePartition.filterMaxPartitions( filterRadii[i], &filtPartValues, &filtPartVector ); std::cout << filtPartValues.size() << " Filtered partitions obtained, writing to file..." <<std::endl; logFile <<"Filtered partitions:\t"<< filtPartValues.size() <<std::endl; std::string outPartFilename( outputFolder + "/filt" + prefix + "parts_d" + boost::lexical_cast<std::string>(levelDepth) ); outPartFilename += ( "_r" + boost::lexical_cast<std::string>(filterRadii[i]) + ".txt" ); treePartition.writePartitionSet(outPartFilename, filtPartValues, filtPartVector); std::cout << "Adding filtered partitions to tree and writing..." <<std::endl; std::string outTreeFilename( outputFolder + "/" + tree.getName() + "_" + prefix + "parts_d" + boost::lexical_cast<std::string>(levelDepth) ); outTreeFilename += ( "_r" + boost::lexical_cast<std::string>(filterRadii[i]) + ".txt" ); tree.insertPartitions( filtPartVector, filtPartValues ); tree.writeTree( outTreeFilename, niftiMode ); } ///////////////////////////////////////////////////////////////// // save and print total time time_t programEndTime(time(NULL)); int totalTime( difftime(programEndTime,programStartTime) ); std::cout <<"Program Finished, total time: "<< totalTime/3600 <<"h "<< (totalTime%3600)/60 <<"' "<< ((totalTime%3600)%60) <<"\" "<< std::endl; logFile <<"-------------"<<std::endl; logFile <<"Finish Time:\t"<< ctime(&programEndTime) <<std::endl; logFile <<"Elapsed time : "<< totalTime/3600 <<"h "<< (totalTime%3600)/60 <<"' "<< ((totalTime%3600)%60) <<"\""<< std::endl; // } // catch(std::exception& e) // { // std::cout << e.what() << std::endl; // return 1; // } return 0; }
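// The core of the SS-partitioning flow above reduces to a handful of calls on
// WHtree / WHtreePartition.  A stripped-down sketch using only calls that
// appear in this file (option parsing, logging and radius filtering omitted;
// assumes the hClustering headers declaring WHtree and WHtreePartition):
#include <string>
#include <vector>

void runSSPartitionSketch( const std::string &treeFilename, const std::string &outputFolder,
                           unsigned int levelDepth, bool niftiMode )
{
	WHtree tree( treeFilename );                       // load the hierarchical tree
	std::vector< float > partitionValues;
	std::vector< std::vector< size_t > > partitionVector;
	WHtreePartition treePartition( &tree );
	// find the optimal (highest-SS) partition at each granularity level
	treePartition.scanOptimalPartitions( levelDepth, &partitionValues, &partitionVector );
	// write the partition set, then embed the partitions in a copy of the tree file
	treePartition.writePartitionSet( outputFolder + "/allSSparts.txt", partitionValues, partitionVector );
	tree.insertPartitions( partitionVector, partitionValues );
	tree.writeTree( outputFolder + "/" + tree.getName() + "_SSparts.txt", niftiMode );
}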
int main( int argc, char *argv[] ) { // try { time_t programStartTime(time(NULL)); boost::filesystem::path workingDir( boost::filesystem::current_path()); // ========== PROGRAM PARAMETERS ========== std::string progName("matchpartition"); std::string configFilename("../../config/"+progName+".cfg"); // program parameters std::string refTreeFilename, targetTreeFilename, matchTableFilename, outputFolder; unsigned int searchDepth(1); float lambda(0); bool signaturePart(false), colorMatching(false), overlapPart(false), exclusive(false); bool verbose(false), niftiMode( true ); // Declare a group of options that will be allowed only on command line boost::program_options::options_description genericOptions("Generic options"); genericOptions.add_options() ( "version", "Program version" ) ( "help,h", "Produce extended program help message" ) ( "reference,r", boost::program_options::value< std::string >(&refTreeFilename), "file with reference partitioned tree" ) ( "target,t", boost::program_options::value< std::string >(&targetTreeFilename), "file with target tree to be partitioned-matched" ) ( "leafmatch,m", boost::program_options::value< std::string >(&matchTableFilename), "file with meta-leaves (base-nodes) matching table" ) ( "outputf,O", boost::program_options::value< std::string >(&outputFolder), "output folder where partition-matched trees will be written" ) ( "signature,s", boost::program_options::value< float >(&lambda), "[xor with -o and -c] Signature-based partition matching, inster lambda coefficient value" ) ( "overlap,o", "[xor with -s and -c] Meta-leaf overlap-based partition matching") ( "depth,d", boost::program_options::value< unsigned int >(&searchDepth)->implicit_value(0), "[opt] partition search depth. Default: 0 (automatic partition-size based adaptive depth, recommended)") ( "justcolor,c", "[xor with -s and -o] Perform only olor matching (requires pre-computed partitions in both trees)") ( "excl,x", "[opt] color exclusively clusters that have a match, clusters without match will be white") ; // Declare a group of options that will be allowed both on command line and in config file boost::program_options::options_description configOptions("Configuration"); configOptions.add_options() ( "verbose,v", "[opt] verbose output." ) ( "vista", "[opt] Write output tree in vista coordinates (default is nifti)." ) ; // Hidden options, will be allowed both on command line and in config file, but will not be shown to the user. 
boost::program_options::options_description hiddenOptions("Hidden options"); //hiddenOptions.add_options() ; boost::program_options::options_description cmdlineOptions; cmdlineOptions.add(genericOptions).add(configOptions).add(hiddenOptions); boost::program_options::options_description configFileOptions; configFileOptions.add(configOptions).add(hiddenOptions); boost::program_options::options_description visibleOptions("Allowed options"); visibleOptions.add(genericOptions).add(configOptions); boost::program_options::positional_options_description posOpt; //this arguments do not need to specify the option descriptor when typed in //posOpt.add("roi-file", -1); boost::program_options::variables_map variableMap; store(boost::program_options::command_line_parser(argc, argv).options(cmdlineOptions).positional(posOpt).run(), variableMap); std::ifstream ifs(configFilename.c_str()); store(parse_config_file(ifs, configFileOptions), variableMap); notify(variableMap); if (variableMap.count( "help" ) ) { std::cout << "---------------------------------------------------------------------------" << std::endl; std::cout << std::endl; std::cout << " Project: hClustering" << std::endl; std::cout << std::endl; std::cout << " Whole-Brain Connectivity-Based Hierarchical Parcellation Project" << std::endl; std::cout << " David Moreno-Dominguez" << std::endl; std::cout << " [email protected]" << std::endl; std::cout << " [email protected]" << std::endl; std::cout << " www.cbs.mpg.de/~moreno" << std::endl; std::cout << std::endl; std::cout << " For more reference on the underlying algorithm and research they have been used for refer to:" << std::endl; std::cout << " - Moreno-Dominguez, D., Anwander, A., & Knösche, T. R. (2014)." << std::endl; std::cout << " A hierarchical method for whole-brain connectivity-based parcellation." << std::endl; std::cout << " Human Brain Mapping, 35(10), 5000-5025. doi: http://dx.doi.org/10.1002/hbm.22528" << std::endl; std::cout << " - Moreno-Dominguez, D. (2014)." << std::endl; std::cout << " Whole-brain cortical parcellation: A hierarchical method based on dMRI tractography." << std::endl; std::cout << " PhD Thesis, Max Planck Institute for Human Cognitive and Brain Sciences, Leipzig." << std::endl; std::cout << " ISBN 978-3-941504-45-5" << std::endl; std::cout << std::endl; std::cout << " hClustering is free software: you can redistribute it and/or modify" << std::endl; std::cout << " it under the terms of the GNU Lesser General Public License as published by" << std::endl; std::cout << " the Free Software Foundation, either version 3 of the License, or" << std::endl; std::cout << " (at your option) any later version." << std::endl; std::cout << " http://creativecommons.org/licenses/by-nc/3.0" << std::endl; std::cout << std::endl; std::cout << " hClustering is distributed in the hope that it will be useful," << std::endl; std::cout << " but WITHOUT ANY WARRANTY; without even the implied warranty of" << std::endl; std::cout << " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" << std::endl; std::cout << " GNU Lesser General Public License for more details." 
<< std::endl; std::cout << std::endl; std::cout << "---------------------------------------------------------------------------" << std::endl << std::endl; std::cout << "matchpartition" << std::endl << std::endl; std::cout << "Finds the best matching corresponding partitions in a target tree to those present in an unrelated reference tree (meta-leaf matching across these two trees must have been precomputed using comparetrees)." << std::endl; std::cout << " Two partition matching algorithms are available: signature matching and overlap matching. Found target partitions will be color-matched as best as possible." << std::endl; std::cout << " There is also the possibility of only color-matching predefined partitions of the target tree to predefined partitions of the reference tree." << std::endl << std::endl; std::cout << "* Arguments:" << std::endl << std::endl; std::cout << " --version: Program version." << std::endl << std::endl; std::cout << " -h --help: produce extended program help message." << std::endl << std::endl; std::cout << " -r --reference: The tree file with the reference partitioned tree." << std::endl << std::endl; std::cout << " -t --target: The tree file with the target tree to find matching partitions in (or with partitions to be color-matched)." << std::endl << std::endl; std::cout << " -m --leafmatch File with the meta-leaf matching information across both trees (output of comparetrees command)." << std::endl << std::endl; std::cout << " -O --outputf: Output folder where partitioned/color matched tree files will be written." << std::endl << std::endl; std::cout << "[-s --signature]: Signature-based partition matching, instert lambda coefficient value. [xor with -o and -c]." << std::endl; std::cout << " In this method a pair signature matrices are computed for each reference-target partitions to find the quality of the match." << std::endl; std::cout << " Each signature matrix defines a value for each pair of base-nodes of the tree it belongs to: 1 the base nodes are found in the same cluster, 0 if otherwise." << std::endl; std::cout << " The higher the correlation between the reference and target-derived matrices, the best match is the target tree partition to the reference tree one." << std::endl; std::cout << " A smart hierarchical search through possible partritions is conducted to find the one with best signature matching." << std::endl; std::cout << " The lambda coefficient determines if and how a similar number of clusters in both partitions affects the matching quality value," << std::endl; std::cout << " Lambda=0 -> cluster number does not affect the quality value. Lambda=1 -> cluster value similarity has as much weight as singature correlation." << std::endl << std::endl; std::cout << "[-o --overlap]: Overlap-based partition matching. [xor with -o and -c]." << std::endl; std::cout << " A match between two partititionsis found by iteratively matching clusters with higher base-node overlap and resolving possible ambiguities." << std::endl; std::cout << " The matching quality between partitions is defined as the number of base-nodes pairs that are classified in the same way in both partitions" << std::endl; std::cout << " (both in the smae cluster r both in different clusters) against the total number of pair combinations." << std::endl; std::cout << " A smart hierarchical search through possible partritions is conducted to find the one with best signature matching." 
        std::cout << "[-d --depth]: Partition search depth (for signature and overlap matching). A higher value will mean a more exhaustive search of the possible partitions," << std::endl;
        std::cout << " but also a higher computation time, especially if the partition to be matched has a high number of clusters (>100)." << std::endl;
        std::cout << " The default value (0, recommended) will adaptively give high search depth to low-cluster partitions and lower search depth to high-cluster partitions." << std::endl << std::endl;
        std::cout << "[-c --justcolor]: Perform only color matching across reference and target tree partitions (both trees need to have the same number of precomputed partitions)." << std::endl;
        std::cout << " In multiple-to-one matching cases clusters from the reference tree might also be recolored to better identify matching relationships across partitions." << std::endl << std::endl;
        std::cout << "[-x --excl]: Color exclusively clusters that have a match; clusters without a match will be recolored white (on both reference and target trees)." << std::endl << std::endl;
        std::cout << "[-v --verbose]: Verbose output (recommended)." << std::endl << std::endl;
        std::cout << "[--vista]: Write output tree files in vista coordinates (default is nifti)." << std::endl << std::endl;
        std::cout << std::endl;
        std::cout << "* Usage example:" << std::endl << std::endl;
        std::cout << " matchpartition -r refTree.txt -t targetTree.txt -m matching.txt -O results/ -s 0.5 -v" << std::endl << std::endl;
        exit(0);
    }

    if (variableMap.count( "version" ) )
    {
        std::cout << progName <<", version 2.0"<<std::endl;
        exit(0);
    }
    if ( variableMap.count( "verbose" ) )
    {
        std::cout << "verbose output" << std::endl;
        verbose=true;
    }
    if ( variableMap.count( "vista" ) )
    {
        if( verbose ) { std::cout << "Using vista coordinates" << std::endl; }
        fileManagerFactory fmf;
        fmf.setVista();
        niftiMode = false;
    }
    else
    {
        if( verbose ) { std::cout << "Using nifti coordinates" << std::endl; }
        fileManagerFactory fmf;
        fmf.setNifti();
        niftiMode = true;
    }
    if (variableMap.count("reference"))
    {
        if(!boost::filesystem::is_regular_file( boost::filesystem::path( refTreeFilename ) ) )
        {
            std::cerr << "ERROR: reference tree file \""<<refTreeFilename<<"\" is not a regular file"<<std::endl;
            std::cerr << visibleOptions << std::endl;
            exit(-1);
        }
        std::cout << "Reference tree file: "<< refTreeFilename << std::endl;
    }
    else
    {
        std::cerr << "ERROR: no reference tree file stated"<<std::endl;
        std::cerr << visibleOptions << std::endl;
        exit(-1);
    }
    if (variableMap.count( "target" ) )
    {
        if(!boost::filesystem::is_regular_file(boost::filesystem::path( targetTreeFilename ) ) )
        {
            std::cerr << "ERROR: target tree file \""<<targetTreeFilename<<"\" is not a regular file"<<std::endl;
            std::cerr << visibleOptions << std::endl;
            exit(-1);
        }
        std::cout << "Target tree file: "<< targetTreeFilename << std::endl;
    }
    else
    {
        std::cerr << "ERROR: no target tree file stated"<<std::endl;
        std::cerr << visibleOptions << std::endl;
        exit(-1);
    }
    if (variableMap.count( "leafmatch" ) )
    {
        if(!boost::filesystem::is_regular_file(boost::filesystem::path( matchTableFilename ) ) )
        {
            std::cerr << "ERROR: match table file \""<<matchTableFilename<<"\" is not a regular file"<<std::endl;
            std::cerr << visibleOptions << std::endl;
            exit(-1);
        }
        std::cout << "Match table file: "<< matchTableFilename << std::endl;
    }
    else
    {
        std::cerr << "ERROR: no match table file stated"<<std::endl;
        std::cerr << visibleOptions << std::endl;
        exit(-1);
    }
"overlap" ) + variableMap.count( "justcolor" ) > 1 ) { std::cerr << "ERROR: multiple matching types selected, please use only one from -s, -o, -c."<<std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if (variableMap.count( "signature" ) ) { std::cout << "Performing Signature partition matching (and color matching)" << std::endl; std::cout <<" Using a lambda factor of "<< lambda << std::endl; signaturePart = true; colorMatching = true; } else if (variableMap.count( "overlap" ) ) { std::cout << "Performing Overlap partition matching (and color matching): " << std::endl; overlapPart = true; colorMatching = true; } else if (variableMap.count( "justcolor" ) ) { std::cout << "Performing only color matching: " << std::endl; colorMatching = true; } else { std::cerr << "ERROR: no matching type selected, select signature, overlap or color matching"<<std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } if (variableMap.count( "excl" ) ) { std::cout << "Color exclusively matched clusters (unmatched clusters will be white) " << std::endl; exclusive = true; } if( signaturePart || overlapPart ) { if( searchDepth > 5 ) { std::cout << "Level depth indicated: " << searchDepth << " is too high, setting to a maximum of 5" << std::endl; searchDepth = 5; } else if ( searchDepth = 0 ) { std::cout << "Using automatic parttion-size based adaptive search depth " << std::endl; } else { std::cout << "Using a search depth of: " << searchDepth << std::endl; } } if (variableMap.count( "outputf" ) ) { if(!boost::filesystem::is_directory(boost::filesystem::path( outputFolder ) ) ) { std::cerr << "ERROR: output folder \""<<outputFolder<<"\" is not a directory"<<std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } std::cout << "Output folder: "<< outputFolder << std::endl; } else { std::cerr << "ERROR: no output folder stated"<<std::endl; std::cerr << visibleOptions << std::endl; exit(-1); } std::string logFilename(outputFolder+"/"+progName+"_log.txt"); std::ofstream logFile(logFilename.c_str()); if(!logFile) { std::cerr << "ERROR: unable to open log file: \""<<logFilename<<"\""<<std::endl; exit(-1); } logFile <<"Start Time:\t"<< ctime(&programStartTime) <<std::endl; logFile <<"Working directory:\t"<< workingDir.string() <<std::endl; logFile <<"Verbose:\t"<< verbose <<std::endl; logFile <<"Reference tree:\t"<< refTreeFilename <<std::endl; logFile <<"Target tree:\t"<< targetTreeFilename <<std::endl; logFile <<"Matching table:\t"<< matchTableFilename <<std::endl; logFile <<"Output folder:\t"<< outputFolder <<std::endl; if( niftiMode ) { logFile << "Using nifti coordinates" << std::endl; } else { logFile << "Using vista coordinates" << std::endl; } logFile <<"-------------"<<std::endl; ///////////////////////////////////////////////////////////////// WHtree refTree( refTreeFilename ); WHtree targetTree( targetTreeFilename ); if (!refTree.isLoaded() || !targetTree.isLoaded() ) { throw std::runtime_error ("ERROR @ compareTrees(): trees are not loaded"); } logFile <<"Reference Tree: "<< refTree.getReport(false) <<std::endl; logFile <<"Target Tree: "<< targetTree.getReport(false) <<std::endl; if (refTree.getDataSize() != targetTree.getDataSize() ) { std::cerr <<"Reference Tree: "<< refTree.getReport() <<std::endl; std::cerr <<"Target Tree: "<< targetTree.getReport() <<std::endl; throw std::runtime_error ("ERROR @ compareTrees() datasets have different dimensions"); } if (verbose) { std::cout <<"Reference Tree: "<< refTree.getReport(false) <<std::endl; std::cout <<"Target Tree: "<< 
        std::cout <<"Target Tree: "<< targetTree.getReport(false) <<std::endl;
    }

    partitionMatcher matcher(&refTree,&targetTree,matchTableFilename, verbose);
    std::string depthString;
    if( searchDepth > 0 )
    {
        depthString = "_d"+string_utils::toString< unsigned int >( searchDepth ) ;
    }
    std::cout <<matcher.reportBaseNodes()<<std::endl;
    std::string suffixPart("_pm_Signature_l" + string_utils::toString< float >( lambda ) + depthString + ".txt");
    std::string suffixNew("_pm_Overlap" + depthString + ".txt");
    std::string suffixColor("_colorMatch.txt");
    bool refTreeColorsChanged( false );

    if( signaturePart )
    {
        logFile << "Signature Matching" << std::endl;
        logFile << "Lambda:\t" << lambda <<std::endl;
        logFile << "Search depth:\t" << searchDepth <<std::endl;
        matcher.findMatchingPartitions( lambda );
    }
    else if ( overlapPart )
    {
        logFile << "Overlap Matching" <<std::endl;
        logFile << "Search depth:\t" << searchDepth <<std::endl;
        matcher.findMatchingPartitions( -1 );
    }
    if( colorMatching )
    {
        logFile << "Color Matching" <<std::endl;
        refTreeColorsChanged = matcher.matchColors( exclusive );
    }

    std::string refOutput;
    std::string targetOutput;
    if ( signaturePart )
    {
        targetOutput = outputFolder + "/" + targetTree.getName() + suffixPart;
    }
    else if ( overlapPart )
    {
        targetOutput = outputFolder + "/" + targetTree.getName() + suffixNew ;
    }
    else
    {
        targetOutput = outputFolder + "/" + targetTree.getName() + suffixColor;
    }
    if( refTreeColorsChanged )
    {
        refOutput = outputFolder + "/" + refTree.getName() + suffixColor;
    }
    else
    {
        refOutput = outputFolder + "/" + refTree.getName() + ".txt";
    }
    if( verbose )
    {
        std::cout << "Writing output target tree file to " << targetOutput << std::endl;
        std::cout << "Writing output reference tree file to " << refOutput << std::endl;
    }
    targetTree.writeTree( targetOutput, niftiMode );
    refTree.writeTree( refOutput, niftiMode );
    logFile << "Written output target tree file to " << targetOutput << std::endl;
    logFile << "Written output reference tree file to " << refOutput << std::endl;

    /////////////////////////////////////////////////////////////////

    // save and print total time
    time_t programEndTime( time( NULL ) );
    int totalTime( difftime( programEndTime, programStartTime ) );
    std::cout <<"Program Finished, total time: "<< totalTime/3600 <<"h "<< (totalTime%3600)/60 <<"' "<< ((totalTime%3600)%60) <<"\" "<< std::endl;
    logFile <<"-------------"<<std::endl;
    logFile <<"Finish Time:\t"<< ctime(&programEndTime) <<std::endl;
    logFile <<"Elapsed time : "<< totalTime/3600 <<"h "<< (totalTime%3600)/60 <<"' "<< ((totalTime%3600)%60) <<"\""<< std::endl;

    // }
    // catch(std::exception& e)
    // {
    //     std::cout << e.what() << std::endl;
    //     return 1;
    // }
    return 0;
}
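
// ============================================================================================
// Illustrative sketch, NOT part of the matchpartition tool: the signature-based matching
// quality described in the help text above. It assumes a partition is encoded as a vector
// assigning a cluster index to every base node, and that both partitions cover the same set
// of base nodes. The exact way lambda and the cluster-count similarity enter the score is an
// assumption chosen to reproduce the documented behaviour (lambda=0: correlation only;
// lambda=1: both terms weigh equally); signatureMatrix(), matrixCorrelation() and
// signatureMatchQuality() are hypothetical names that do not exist in the hClustering code.
// ============================================================================================
#include <vector>
#include <cmath>
#include <cstddef>
#include <algorithm>

namespace sketch
{
// Entry (i,j) of the signature matrix is 1 if base nodes i and j lie in the same cluster, 0 otherwise.
inline std::vector< std::vector< int > > signatureMatrix( const std::vector< std::size_t >& partition )
{
    const std::size_t n( partition.size() );
    std::vector< std::vector< int > > sig( n, std::vector< int >( n, 0 ) );
    for( std::size_t i = 0; i < n; ++i )
    {
        for( std::size_t j = 0; j < n; ++j )
        {
            sig[ i ][ j ] = ( partition[ i ] == partition[ j ] ) ? 1 : 0;
        }
    }
    return sig;
}

// Pearson correlation between two equally sized matrices, taken element-wise.
inline double matrixCorrelation( const std::vector< std::vector< int > >& a,
                                 const std::vector< std::vector< int > >& b )
{
    const std::size_t n( a.size() );
    if( n == 0 )
    {
        return 0.0;
    }
    const double count( static_cast< double >( n * n ) );
    double meanA( 0.0 ), meanB( 0.0 );
    for( std::size_t i = 0; i < n; ++i )
    {
        for( std::size_t j = 0; j < n; ++j )
        {
            meanA += a[ i ][ j ];
            meanB += b[ i ][ j ];
        }
    }
    meanA /= count;
    meanB /= count;
    double cov( 0.0 ), varA( 0.0 ), varB( 0.0 );
    for( std::size_t i = 0; i < n; ++i )
    {
        for( std::size_t j = 0; j < n; ++j )
        {
            const double dA( a[ i ][ j ] - meanA );
            const double dB( b[ i ][ j ] - meanB );
            cov += dA * dB;
            varA += dA * dA;
            varB += dB * dB;
        }
    }
    if( varA == 0.0 || varB == 0.0 )
    {
        return 0.0; // degenerate partition (one single cluster or all singletons): correlation undefined
    }
    return cov / std::sqrt( varA * varB );
}

// Combined quality: signature correlation plus a lambda-weighted cluster-count similarity term,
// normalized so that lambda=0 ignores cluster counts and lambda=1 weighs both terms equally.
inline double signatureMatchQuality( const std::vector< std::size_t >& refPartition,
                                     const std::vector< std::size_t >& targetPartition,
                                     std::size_t refClusterCount, std::size_t targetClusterCount,
                                     double lambda )
{
    const double correlation( matrixCorrelation( signatureMatrix( refPartition ),
                                                 signatureMatrix( targetPartition ) ) );
    const double sizeSimilarity( static_cast< double >( std::min( refClusterCount, targetClusterCount ) ) /
                                 static_cast< double >( std::max( refClusterCount, targetClusterCount ) ) );
    return ( correlation + lambda * sizeSimilarity ) / ( 1.0 + lambda );
}
} // namespace sketch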
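
// ============================================================================================
// Illustrative sketch, NOT part of the matchpartition tool: the overlap-matching quality
// described in the help text above, i.e. the fraction of base-node pairs that are classified
// consistently by both partitions (together in both, or separated in both) over the total
// number of pair combinations, which amounts to a Rand-index-style agreement.
// pairAgreementQuality() is a hypothetical name; partitions are again assumed to be
// cluster-index vectors over the same set of base nodes.
// ============================================================================================
#include <vector>
#include <cstddef>

namespace sketch
{
inline double pairAgreementQuality( const std::vector< std::size_t >& refPartition,
                                    const std::vector< std::size_t >& targetPartition )
{
    const std::size_t n( refPartition.size() ); // both partitions cover the same base nodes
    std::size_t agreeing( 0 ), total( 0 );
    for( std::size_t i = 0; i < n; ++i )
    {
        for( std::size_t j = i + 1; j < n; ++j )
        {
            const bool togetherInRef( refPartition[ i ] == refPartition[ j ] );
            const bool togetherInTarget( targetPartition[ i ] == targetPartition[ j ] );
            if( togetherInRef == togetherInTarget )
            {
                ++agreeing; // pair classified the same way in both partitions
            }
            ++total;
        }
    }
    return ( total == 0 ) ? 0.0 : static_cast< double >( agreeing ) / static_cast< double >( total );
}
} // namespace sketch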