/*
 * Connection-state callback registered with the IPMI connection layer.
 *
 * Non-interactive (batch) mode: any error is fatal; on the first
 * successful connection the single command in `cmdstr` is executed
 * exactly once, and the program stops if that command asks to.
 * Interactive mode: report connection state changes on stderr.
 *
 * ipmi            - connection the event is for (unused here)
 * err             - 0 on success, otherwise an IPMI error code
 * port_num        - which port of the connection changed state
 * still_connected - nonzero while at least one port is still up
 * cb_data         - unused
 */
static void
con_changed_handler(ipmi_con_t *ipmi, int err, unsigned int port_num,
                    int still_connected, void *cb_data)
{
    if (!interactive) {
        /* Batch mode: a connection error means we can never run the
           command, so bail out immediately. */
        if (err) {
            fprintf(stderr, "Unable to setup connection: %x\n", err);
            leave(1);
        }
        /* Guard so the command line runs only on the first "up" event,
           not on every reconnect. */
        if (!interactive_done) {
            interactive_done = 1;
            if (process_input_line(cmdstr))
                continue_operation = 0;
        }
    } else {
        /* port_num is unsigned int: print with %u (was %d, which is
           undefined behavior for values above INT_MAX). */
        if (err)
            fprintf(stderr, "Connection failed to port %u: %x\n",
                    port_num, err);
        else
            fprintf(stderr, "Connection up to port %u\n", port_num);
        if (!still_connected)
            fprintf(stderr, "All connection to the BMC are down\n");
    }
}
/* Drive variation expansion from standard input: read stdin line by
 * line and hand every non-empty line to process_input_line() together
 * with a shared reference-sequence object.  Always returns 0. */
int expand_variations_impl()
{
    CNGSObject<ngs::ReferenceSequence, ncbi::String> ref_obj;

    for (std::string line; std::getline(std::cin, line); ) {
        if (!line.empty())
            process_input_line(ref_obj, line.c_str(), line.size());
    }
    return 0;
}
/*
 * Event-loop callback fired when user input is readable.
 *
 * Accumulates bytes into the file-level `input_line` buffer (255 bytes,
 * write position `pos`); each complete line (terminated by '\n' or '\r')
 * is handed to process_input_line().  EOF shuts the connection down;
 * a read error is fatal.
 *
 * NOTE(review): reads fd 0 rather than the `fd` argument — fine as long
 * as this handler is only ever registered for stdin; confirm at the
 * registration site.
 */
static void
user_input_ready(int fd, void *data)
{
    /* Append new input after any partial line left from a previous read. */
    int count = read(0, input_line + pos, 255 - pos);
    int i, j;

    if (count < 0) {
        perror("input read");
        con->close_connection(con);
        leave(1);
    }
    if (count == 0) {
        /* EOF: finish the interactive prompt line and stop cleanly. */
        if (interactive)
            printf("\n");
        con->close_connection(con);
        continue_operation = 0;
        return;
    }

    for (i = 0; count > 0; i++, count--) {
        if ((input_line[pos] == '\n') || (input_line[pos] == '\r')) {
            input_line[pos] = '\0';
            process_input_line(input_line);
            /* Shift the bytes *after* the terminator (count-1 of them;
             * the terminator itself is consumed by this iteration's
             * count--) down to the start of the buffer.  The previous
             * code copied from input_line[j+pos], which dragged the
             * '\0' to the front and corrupted/dropped the next command
             * when several lines arrived in one read(). */
            for (j = 0; j < count - 1; j++)
                input_line[j] = input_line[j + pos + 1];
            pos = 0;
            if (interactive)
                printf("=> ");
            fflush(stdout);
        } else {
            pos++;
        }
    }

    if (pos >= 255) {
        /* Buffer full without a newline: discard and start over. */
        fprintf(stderr, "Input line too long\n");
        pos = 0;
        if (interactive)
            printf("=> ");
        fflush(stdout);
    }
}
/* Sort each chunk of file using C++ sort routine (It is nlog(n) complexity, which applies quick_sort algorithm) */ int sort_file_chunks(int number_of_chunks) { // A table of two tuples to read the data from the input file vector<RECORD> data_table; int i = 0, j = 0; // Loop over each file chunks for(i = 0; i < number_of_chunks; i++) { // File name char filename[100]; // Format the filename sprintf(filename, "data_part_%d.dat", i); // Open a file for reading ifstream input_file_ptr(string(filename), ifstream::in); // Error exception if(! input_file_ptr.is_open()) return -1; // Error in openeing a file string line; // Read line-by-line while(getline(input_file_ptr, line)) data_table.push_back(process_input_line(line)); // Sort the vector in an increasing order using compare in-line function sort(data_table.begin(), data_table.end(), compare); // Close the opened input file input_file_ptr.close(); // Remove the file from the directory remove(filename); cout << "*** SORT: FILE: " << i << " has been successfully sorted\n"; // Write-back steps // Generate an output sorted file // Format the output file name sprintf(filename, "data_part_sorted_%d.dat", i); // Output file stream ofstream output_file_ptr; // Open for write output_file_ptr.open( string(filename), ofstream::out | ofstream::app ); // Error exception if(! output_file_ptr.is_open()) return -2; // Error in openeing a file // Loop over the vector data and write each back for(j = 0; j < data_table.size(); j++) output_file_ptr << data_table[j].key << " " << data_table[j].value << endl; cout << "*** SORT: File: " << i << " is stored" << endl; // Close the file output_file_ptr.close(); // Clear the vector for the next iteration data_table.clear(); } // Success flag return 1; }
/* Merge the file chucks */ int merge_file_chunks(int number_of_chunks) { // Number of bytes each data queue can maximum hold int max_buf_size = (int) ((MAX_MEM_SIZE / number_of_chunks) / LINE_SIZE_EST); // A file stream object for each file chunk ifstream input_file_ptr[number_of_chunks]; // Open every file int i = 0; for(i = 0; i < number_of_chunks; i++) { // Initialize IO char filename[100]; sprintf(filename, "data_part_sorted_%d.dat", i); // Open the file input_file_ptr[i].open(filename, ifstream::in); if(! input_file_ptr[i].is_open()) return -1; // Error in an input file } // A queue for each input file that holds that file data // We have a vector of queues that store all data of all files vector<queue<RECORD>> data_table_queues; for(i = 0; i < number_of_chunks; i++) { data_table_queues.push_back(queue<RECORD>()); string line; int j = 0; while(getline(input_file_ptr[i], line)) { data_table_queues[i].push(process_input_line(line)); j++; if(j == max_buf_size || input_file_ptr[i].eof()) break; } cout << "MERGE: FILE: " << i << " has been initially read\n"; } ofstream output_file_ptr; output_file_ptr.open( "database_sorted.dat", ofstream::out | ofstream::app ); if(! output_file_ptr.is_open()) return -2; int local_index = -1; RECORD local_data_record; long long local_key = 0; string local_value = ""; cout << "MERGE: Start Merging" << endl; // Merging while(true) { local_index = -1; local_key = 0; local_value = ""; for(i = 0; i < number_of_chunks; i++) { if(!(data_table_queues[i].empty())) { local_data_record = (RECORD) data_table_queues[i].front(); if(local_index < 0 || (local_data_record.key < local_key)) { local_index = i; local_key = local_data_record.key; local_value = local_data_record.value; } } } if(local_index == -1) break; output_file_ptr << local_key << " " << local_value << endl; data_table_queues[local_index].pop(); // There are more data to read in the input files if(data_table_queues[local_index].empty()) { int j = 0; if(! 
input_file_ptr[local_index].eof()) { cout << "MERGE: Reloading File " << local_index << endl; string line; while(getline(input_file_ptr[local_index], line)) { data_table_queues[local_index].push(process_input_line(line)); j++; if(j == max_buf_size || input_file_ptr[local_index].eof()) break; } cout << "MERGE: File " << local_index << " is reloaded\n"; } } } output_file_ptr.close(); for(i = 0; i < number_of_chunks; i++) { input_file_ptr[i].close(); char filename[100]; sprintf(filename, "data_part_sorted_%d.dat", i); remove(filename); } cout << "MERGE: Finished merging files" << endl; }