// TODO: change this entry point to be encoder-only.
int main(int argc, char *argv[]) {
  if(argc < 9) {
    std::cout << "All file arguments are required!!" << std::endl;
    exit(-1);
  }

  uint32_t search_area     = std::stoi(argv[1], nullptr, 10);
  int32_t  intra_interval  = std::stoi(argv[2], nullptr, 10);
  uint32_t unique_interval = std::stoi(argv[3], nullptr, 10);
  int32_t  vErrThreshold   = std::stoi(argv[4], nullptr, 10);
  // thread_count is only consumed by the commented-out threaded path below.
  uint32_t thread_count    = std::stoi(argv[5], nullptr, 10);
  std::string dir_path(argv[6]);
  std::string out_file_path(argv[7]);
  std::string out_dir(argv[8]);

  // The endpoint-image directory is optional.
  std::string ep_dir;
  if(argc > 9)
    ep_dir = std::string(argv[9]);

  //MPTC::CompressPNGStream(dir_path, out_file_path, search_area, vErrThreshold, interval, ep_dir);
  //MPTC::DecompressMPTCStream(out_file_path, out_dir, interval);

  MPTC::CompressMultiUnique(dir_path, out_file_path, search_area, vErrThreshold,
                            intra_interval, unique_interval, ep_dir);

  //MPTC::DecompressMultiUnique(out_file_path, out_dir, ep_dir);

  //uint32_t wavelet_block_sz;
  //auto t1 = std::chrono::high_resolution_clock::now();
  //MPTC::ThreadedCompressMultiUnique(dir_path, out_file_path, search_area, vErrThreshold,
  //                                  intra_interval, unique_interval, wavelet_block_sz,
  //                                  ep_dir, thread_count);
  //auto t2 = std::chrono::high_resolution_clock::now();
  //std::chrono::duration<double> fp_ms = t2 - t1;
  //std::cout << "Time:" << fp_ms.count() << std::endl;

  return 0;
}
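// Illustrative invocation (the binary name and the numeric values below are
// made up; only the argument order and meaning are taken from the parsing
// above):
//
//   ./encoder <search_area> <intra_interval> <unique_interval> <vErrThreshold> \
//             <thread_count> <input_png_dir> <out_mptc_file> <out_dir> [<ep_dir>]
//
//   ./encoder 16 8 4 50 4 ./frames ./out.mptc ./decoded ./endpoints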
void network_generator::random_in_out_let(vector< pair<int, int> > &inlet,
                                          vector< pair<int, int> > &outlet) {
  // One random boundary side (1..4) per inlet/outlet. The distinctness loops
  // below assume exactly four inlets and four outlets.
  vector<int> in_dir(inlet.size()), out_dir(inlet.size());
  // valid[side-1][coordinate] marks boundary coordinates that are already taken.
  vector< vector<int> > valid(4, vector<int>(101));

  for(size_t i = 0; i < in_dir.size(); i++) {
    in_dir[i] = rand() % 4 + 1;
    out_dir[i] = rand() % 4 + 1;
  }

  // Cycle each direction through the four sides until all inlet directions
  // are distinct, and likewise for the outlet directions.
  while(in_dir[1] == in_dir[0])
    in_dir[1] = in_dir[1] % 4 + 1;
  while(in_dir[2] == in_dir[0] || in_dir[2] == in_dir[1])
    in_dir[2] = in_dir[2] % 4 + 1;
  while(in_dir[3] == in_dir[0] || in_dir[3] == in_dir[1] || in_dir[3] == in_dir[2])
    in_dir[3] = in_dir[3] % 4 + 1;

  while(out_dir[1] == out_dir[0])
    out_dir[1] = out_dir[1] % 4 + 1;
  while(out_dir[2] == out_dir[0] || out_dir[2] == out_dir[1])
    out_dir[2] = out_dir[2] % 4 + 1;
  while(out_dir[3] == out_dir[0] || out_dir[3] == out_dir[1] || out_dir[3] == out_dir[2])
    out_dir[3] = out_dir[3] % 4 + 1;

  for(size_t i = 0; i < in_dir.size(); i++) {
    // Place the inlet on its chosen side at an odd, previously unused
    // coordinate in [10, 89].
    if(in_dir[i] == 1) {
      inlet[i].first = 100;
      inlet[i].second = rand() % 80 + 10;
      while(valid[0][inlet[i].second] == 1 || inlet[i].second % 2 == 0)
        inlet[i].second = rand() % 80 + 10;
      valid[0][inlet[i].second] = 1;
    } else if(in_dir[i] == 2) {
      inlet[i].second = 0;
      inlet[i].first = rand() % 80 + 10;
      while(valid[1][inlet[i].first] == 1 || inlet[i].first % 2 == 0)
        inlet[i].first = rand() % 80 + 10;
      valid[1][inlet[i].first] = 1;
    } else if(in_dir[i] == 3) {
      inlet[i].first = 0;
      inlet[i].second = rand() % 80 + 10;
      while(valid[2][inlet[i].second] == 1 || inlet[i].second % 2 == 0)
        inlet[i].second = rand() % 80 + 10;
      valid[2][inlet[i].second] = 1;
    } else if(in_dir[i] == 4) {
      inlet[i].second = 100;
      inlet[i].first = rand() % 80 + 10;
      while(valid[3][inlet[i].first] == 1 || inlet[i].first % 2 == 0)
        inlet[i].first = rand() % 80 + 10;
      valid[3][inlet[i].first] = 1;
    }

    // Place the outlet the same way. Note that the chosen coordinate is not
    // recorded in 'valid', so later placements are not prevented from
    // reusing it.
    if(out_dir[i] == 1) {
      outlet[i].first = 100;
      outlet[i].second = rand() % 80 + 10;
      while(valid[0][outlet[i].second] == 1 || outlet[i].second % 2 == 0)
        outlet[i].second = rand() % 80 + 10;
    } else if(out_dir[i] == 2) {
      outlet[i].second = 0;
      outlet[i].first = rand() % 80 + 10;
      while(valid[1][outlet[i].first] == 1 || outlet[i].first % 2 == 0)
        outlet[i].first = rand() % 80 + 10;
    } else if(out_dir[i] == 3) {
      outlet[i].first = 0;
      outlet[i].second = rand() % 80 + 10;
      while(valid[2][outlet[i].second] == 1 || outlet[i].second % 2 == 0)
        outlet[i].second = rand() % 80 + 10;
    } else if(out_dir[i] == 4) {
      outlet[i].second = 100;
      outlet[i].first = rand() % 80 + 10;
      while(valid[3][outlet[i].first] == 1 || outlet[i].first % 2 == 0)
        outlet[i].first = rand() % 80 + 10;
    }
  }
}
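// The eight side-specific branches above differ only in which coordinate is
// fixed at 0/100 and which row of 'valid' is consulted. Below is a minimal
// sketch of how that selection could be factored into one helper; this
// function is hypothetical (not part of network_generator) and assumes the
// same 100x100 grid, rand() source and using-declarations as the code above.

// Pick an odd coordinate in [10, 89] on boundary side 'dir' (1..4) that is
// not yet marked in 'valid', mark it, and return the (first, second) point.
static pair<int, int> random_boundary_point(int dir, vector< vector<int> > &valid) {
  int c = rand() % 80 + 10;
  while(valid[dir - 1][c] == 1 || c % 2 == 0)
    c = rand() % 80 + 10;
  valid[dir - 1][c] = 1;
  switch(dir) {
    case 1:  return make_pair(100, c);
    case 2:  return make_pair(c, 0);
    case 3:  return make_pair(0, c);
    default: return make_pair(c, 100);
  }
}
// With such a helper, each inlet placement would reduce to
//   inlet[i] = random_boundary_point(in_dir[i], valid);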
////////////////////////////////////////////////////////////////////////////////
// correct_reads
//
// Correct the reads in the file 'fqf' using the data structure of trusted
// kmers 'trusted', the matrix of nt->nt error rates 'ntnt_prob' and the prior
// nt probabilities 'prior_prob'. 'starts' and 'counts' help OpenMP parallelize
// the read processing. If 'pe_code' is 0, the reads are not paired; if it is
// 1, this file is the first of a pair, so print all reads and withhold
// combining; if it is 2, the file is the second of a pair, so print all reads
// and then combine both 1 and 2.
////////////////////////////////////////////////////////////////////////////////
static void correct_reads(string fqf, int pe_code, bithash * trusted,
                          vector<streampos> & starts,
                          vector<unsigned long long> & counts,
                          double ntnt_prob[Read::max_qual][4][4],
                          double prior_prob[4]) {
  // output directory
  struct stat st_file_info;
  string path_suffix = split(fqf, '/').back();
  string out_dir("." + path_suffix);
  if(stat(out_dir.c_str(), &st_file_info) == 0) {
    cerr << "Hidden temporary directory " << out_dir
         << " already exists and will be used" << endl;
  } else if(mkdir(out_dir.c_str(), S_IRWXU) == -1) {
    cerr << "Failed to create hidden temporary directory " << out_dir << endl;
    exit(EXIT_FAILURE);
  }

  // collect stats
  stats * thread_stats = new stats[omp_get_max_threads()];

  unsigned int chunk = 0;
#pragma omp parallel //shared(trusted)
  {
    int tid = omp_get_thread_num();

    // input
    ifstream reads_in(fqf.c_str());

    unsigned int tchunk;
    string header, ntseq, mid, strqual, corseq;
    int trim_length;
    char* nti;
    Read *r;

    // grab the next unprocessed chunk
#pragma omp critical
    tchunk = chunk++;

    while(tchunk < starts.size()) {
      reads_in.seekg(starts[tchunk]);

      // output file for this chunk
      string toutf(out_dir + "/");
      stringstream tconvert;
      tconvert << tchunk;
      toutf += tconvert.str();

      if(overwrite_temp || stat(toutf.c_str(), &st_file_info) == -1) {
        ofstream reads_out(toutf.c_str());

        // output log
        string tlogf = toutf + ".log";
        ofstream corlog_out;
        if(out_log) {
          corlog_out.open(tlogf.c_str());
        }

        unsigned long long tcount = 0;
        while(getline(reads_in, header)) {
          // get sequence
          getline(reads_in, ntseq);

          // convert ntseq to iseq
          vector<unsigned int> iseq;
          for(unsigned int i = 0; i < ntseq.size(); i++) {
            nti = strchr(nts, ntseq[i]);
            iseq.push_back(nti - nts);
          }

          // get quality values
          getline(reads_in, mid);
          getline(reads_in, strqual);

          // mark untrusted kmers and compute a trim point
          vector<int> untrusted;
          if(iseq.size() < trim_t)
            trim_length = 0;
          else {
            for(unsigned int i = 0; i + k <= iseq.size(); i++) {
              if(!trusted->check(&iseq[i])) {
                untrusted.push_back(i);
              }
            }
            trim_length = quick_trim(strqual, untrusted);
          }

          // fix error reads
          if(untrusted.size() > 0) {
            r = new Read(header, &iseq[0], strqual, untrusted, trim_length);
            corseq = r->correct(trusted, ntnt_prob, prior_prob);

            // output read w/ trim and corrections
            output_read(reads_out, corlog_out, pe_code, header, ntseq, mid,
                        strqual, corseq, thread_stats[tid]);

            delete r;
          } else {
            // output read as trimmed only
            output_read(reads_out, corlog_out, pe_code, header, ntseq, mid,
                        strqual, ntseq.substr(0, trim_length), thread_stats[tid]);
          }

          if(++tcount == counts[tchunk])
            break;
        }
        reads_out.close();
      }

      // grab the next unprocessed chunk
#pragma omp critical
      tchunk = chunk++;
    }

    reads_in.close();
  }

  // combine per-thread stats
  for(int i = 1; i < omp_get_max_threads(); i++) {
    thread_stats[0].validated    += thread_stats[i].validated;
    thread_stats[0].corrected    += thread_stats[i].corrected;
    thread_stats[0].trimmed      += thread_stats[i].trimmed;
    thread_stats[0].trimmed_only += thread_stats[i].trimmed_only;
    thread_stats[0].removed      += thread_stats[i].removed;
  }

  // print stats
  size_t suffix_index = fqf.rfind('.');
  string outf;
  if(suffix_index == string::npos) {
    outf = fqf + ".stats.txt";
  } else {
    outf = fqf.substr(0, suffix_index + 1) + "stats.txt";
  }
  ofstream stats_out(outf.c_str());
  stats_out << "Validated: " << thread_stats[0].validated << endl;
  stats_out << "Corrected: " << thread_stats[0].corrected << endl;
  stats_out << "Trimmed: " << thread_stats[0].trimmed << endl;
  stats_out << "Trimmed only: " << thread_stats[0].trimmed_only << endl;
  stats_out << "Removed: " << thread_stats[0].removed << endl;
  stats_out.close();

  delete [] thread_stats;
}
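// correct_reads only consumes 'starts' (one seek position per chunk) and
// 'counts' (how many four-line FASTQ records to process from that position).
// Below is a minimal sketch of one way such a chunk table could be built; it
// is illustrative only (the surrounding pipeline presumably has its own
// chunking routine) and assumes plain uncompressed FASTQ plus the same
// headers and using-declarations as this file.
static void chunk_fastq(const string & fqf, unsigned long long reads_per_chunk,
                        vector<streampos> & starts,
                        vector<unsigned long long> & counts) {
  ifstream in(fqf.c_str());
  string line;
  unsigned long long reads_in_chunk = 0;
  streampos record_start = in.tellg();
  // each FASTQ record is four lines: header, sequence, '+', qualities
  while(getline(in, line)) {
    if(reads_in_chunk == 0) {
      starts.push_back(record_start);
      counts.push_back(0);
    }
    getline(in, line); // sequence
    getline(in, line); // '+' separator
    getline(in, line); // qualities
    counts.back()++;
    if(++reads_in_chunk == reads_per_chunk)
      reads_in_chunk = 0;
    record_start = in.tellg();
  }
}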
int photoalbum::exports::main(int argc, const char* argv[]) {
  bool show_help = false, fullsize = false;
  std::string db_file, output_directory, geometry;
  std::vector<commandline::option> options{
    commandline::parameter("db", db_file, "Database file path"),
    commandline::parameter("out", output_directory, "Output directory"),
    commandline::parameter("geometry", geometry, "ImageMagick geometry"),
    commandline::flag("fullsize", fullsize, "Do not scale images"),
    commandline::flag("help", show_help, "Show a help message")
  };
  commandline::parse(argc, argv, options);

  if(show_help) {
    commandline::print(argc, argv, options);
    return 0;
  }
  if(db_file.empty()) {
    std::cerr << "database file not specified" << std::endl;
    commandline::print(argc, argv, options);
    return 1;
  }
  if(output_directory.empty()) {
    std::cerr << "output directory not specified" << std::endl;
    commandline::print(argc, argv, options);
    return 1;
  }
  if((!fullsize && geometry.empty()) || (fullsize && geometry.length())) {
    std::cerr << "exactly one of --fullsize and --geometry must be specified"
              << std::endl;
    commandline::print(argc, argv, options);
    return 1;
  }

  sqlite::connection conn(db_file);
  boost::filesystem::path out_dir(output_directory);

  sqlite::rowset<int, std::string> albums;
  sqlite::select(
    conn,
    "SELECT album_id, name FROM album ",
    sqlite::empty_row(),
    albums
  );

  for(const sqlite::row<int, std::string>& album : albums) {
    boost::filesystem::path album_dir = out_dir/sqlite::column<1>(album);
    std::cerr << "exporting to directory " << album_dir << std::endl;
    boost::filesystem::create_directory(album_dir);

    sqlite::rowset<int> photographs;
    sqlite::select(
      conn,
      "SELECT photograph.photograph_id "
      "FROM album JOIN photograph_in_album "
      "ON album.album_id = photograph_in_album.album_id "
      "JOIN photograph "
      "ON photograph_in_album.photograph_id = photograph.photograph_id "
      "WHERE album.album_id = ? "
      "ORDER BY taken ASC ",
      sqlite::row<int>(sqlite::column<0>(album)),
      photographs
    );

    static const char *unknown_string = "unknown";
    std::string last_taken = unknown_string;
    int count = 0;
    for(const sqlite::row<int>& photo_id : photographs) {
      sqlite::row<std::string, std::vector<unsigned char>> photo_data;
      {
        sqlite::rowset<std::string, std::vector<unsigned char>> data_list;
        sqlite::select(
          conn,
          "SELECT taken, data "
          "FROM photograph NATURAL JOIN jpeg_data "
          "WHERE photograph_id = ? ",
          photo_id,
          data_list
        );
        if(!data_list.size()) {
          std::cerr << "no photograph with id " << sqlite::column<0>(photo_id)
                    << std::endl;
          continue;
        }
        photo_data = std::move(data_list[0]);
      }

      // Write out the photograph, numbering files that share a taken date.
      const std::string taken = sqlite::column<0>(photo_data).length() ?
        std::string(sqlite::column<0>(photo_data), 0, 10) : unknown_string;
      if(taken == last_taken) {
        count++;
      } else {
        count = 1;
        last_taken = taken;
      }

      std::ostringstream out_filename;
      out_filename << taken << "_" << std::setfill('0') << std::setw(4)
                   << count << ".jpg";
      const boost::filesystem::path jpeg_file = album_dir/out_filename.str();
      std::cerr << "export photograph to " << jpeg_file << std::endl;

      // JPEG data is binary, so open the output stream in binary mode.
      boost::filesystem::ofstream os(jpeg_file, std::ios::binary);
      if(fullsize) {
        const std::vector<unsigned char>& data = sqlite::column<1>(photo_data);
        os.write(reinterpret_cast<const char*>(data.data()), data.size());
      } else {
        std::vector<unsigned char> data;
        util::scale(sqlite::column<1>(photo_data), geometry, data);
        os.write(reinterpret_cast<const char*>(data.data()), data.size());
      }
      os.close();
    }
  }

  return 0;
}
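// Illustrative invocation (the binary name and example values are made up;
// the option names and the "--" syntax come from the commandline declarations
// and the error message above):
//
//   ./export --db photos.db --out /tmp/albums --geometry 1600x1600
//   ./export --db photos.db --out /tmp/albums --fullsize
//
// Exactly one of --geometry and --fullsize must be given, as enforced above.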