// Write a CUPS->Boomaga job file to destFile: a magic header line, KEY=value
// job metadata, then the raw job payload streamed from `src`.
// Returns false (after removing the partial file) if either stream errored,
// or if ownership of the new file cannot be handed to the job's user.
static bool createJobFile(istream &src, const string &destFile, const Args &args)
{
    Log::debug("Create job file %s", destFile.c_str());
    ofstream dest(destFile, ios::binary | ios::trunc);

    // \033 (ESC) starts the Boomaga job-file magic line.
    dest << "\033CUPS_BOOMAGA\n";
    dest << "JOB=" << escapeString(args.jobID) << "\n";
    // NOTE(review): the token below appears redacted/garbled in this copy of
    // the source — presumably escapeString(args.user); verify against upstream.
    dest << "USER="******"\n";
    dest << "TITLE=" << escapeString(args.title) << "\n";
    dest << "COUNT=" << args.count << "\n";
    dest << "OPTIONS=" << escapeString(args.options) << "\n";
    // Everything after this marker line is the raw job data.
    dest << "CUPS_BOOMAGA_DATA\n";
    dest << src.rdbuf();
    dest.close();

    if (src.bad() || !dest.good())
    {
        Log::debug("Delete file %s", destFile.c_str());
        unlink(destFile.c_str());
        // NOTE(review): errno may already have been clobbered by unlink()
        // above, so this message can report the wrong cause.
        Log::error("Can't create job file: %s", strerror(errno));
        return false;
    }

    // Hand the file over to the submitting user; group (-1) is left unchanged.
    if (chown(destFile.c_str(), args.pwd->pw_uid, -1) != 0)
    {
        Log::error("Can't change owner on directory %s: %s", destFile.c_str(), std::strerror(errno));
        return false;
    }

    return true;
}
//read coefficients from a stream. This assumes pairs of bitstring and values, e.g. //010100 1.232 //101011 65.432 int hypercube_lowd::read_func_labeled(istream &in) { int i=0, count; char gt[dim+2]; if (in.bad()) { cerr <<"hypercube_lowd::read_func_labeled: bad stream\n"; return HC_BADARG; } count=0; while(in.good() && count<(1<<dim)) { i=0; in >>gt; for (int k=0; k<dim; k++) if (gt[k]=='1') i+=1<<k; in >>func[i]; count++; in.ignore(100, '\n'); } if (count<(1<<dim)) { cerr <<"hypercube_lowd::read_func: file end reached after "<<i<<" values!\n"; return HC_BADARG; } return 0; }
// Advance `inputFile` to the first line containing both "TREE" and "="
// (case-insensitive), then seek back so the caller can re-read that line.
// Exits the program if the stream cannot be read (including plain EOF with
// no matching line).
void seekToFirstTree(istream &inputFile)
{
    if (DEBUG_OUTPUT >= 3){
        cout << "seekToFirstTree called." << endl;
    }
    string fileInputUpper;
    getline(inputFile,fileInputUpper);
    // Fix: this used inputFile.bad().  Hitting EOF only sets eofbit/failbit,
    // never badbit, so the old check could not fire and the loop below would
    // spin forever re-testing the same (empty) line.  fail() covers both
    // hard errors and EOF.
    if (inputFile.fail()) {
        cout << "Can not read file given to seekToFirstTree." << endl;
        exit (0);
    }
    transform(fileInputUpper.begin(), fileInputUpper.end(),fileInputUpper.begin(), ::toupper);
    while (!( fileInputUpper.find("TREE") != string::npos
              && fileInputUpper.find("=") != string::npos)) {
        if (DEBUG_OUTPUT >= 3){
            cout << fileInputUpper.substr(0,40) << endl;
        }
        getline(inputFile,fileInputUpper);
        if (inputFile.fail()) { // see note above: fail(), not bad()
            cout << "Can not read file given to seekToFirstTree." << endl;
            exit (0);
        }
        transform(fileInputUpper.begin(), fileInputUpper.end(),fileInputUpper.begin(), ::toupper);
    }
    if (DEBUG_OUTPUT >= 3){
        cout << fileInputUpper.substr(0,40) << endl;
        cout << "inputFile.tellg(): " << inputFile.tellg() << endl;
        cout << "inputFile.bad(): " << inputFile.bad() << endl;
        cout << "inputFile.good(): " << inputFile.good() << endl;
        cout << "inputFile.eof(): " << inputFile.eof() << endl;
        cout << "inputFile.rdstate(): " << inputFile.rdstate() << endl;
    }
    // Seek back to the start of the matched line (+1 for the consumed '\n').
    // Absolute seek from beg is used instead of a negative relative seek,
    // which misbehaved on some platforms ("Peter's computer").
    int curPosition = inputFile.tellg();
    // inputFile.seekg(-(fileInputUpper.size()+1),ios_base::cur);
    inputFile.seekg( curPosition - ((fileInputUpper.size()+1)), ios_base::beg);
    if (DEBUG_OUTPUT >= 3){
        cout << "inputFile.tellg(): " << inputFile.tellg() << endl;
        cout << "inputFile.bad(): " << inputFile.bad() << endl;
        cout << "inputFile.good(): " << inputFile.good() << endl;
        cout << "inputFile.eof(): " << inputFile.eof() << endl;
        cout << "inputFile.rdstate(): " << inputFile.rdstate() << endl;
        cout << "seekToFirstTree done." << endl;
    }
}
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

// Append one line (up to, but not including, '\n') from Datastream onto
// LineString.  Returns silently at end of stream; throws std::runtime_error
// if the stream enters a hard-error state while reading.
// Fix: the original did `throw exception("...")` — std::exception has no
// string constructor in standard C++ (an MSVC extension) — and embedded
// __FILE__/__LINE__ *inside* the string literal, where they never expand.
void GetALineFromStream(std::string &LineString, std::istream &Datastream)
{
    char Buffer;
    if(!Datastream.eof())
    {
        Datastream.get(Buffer);
    }
    else
    {
        return;
    }
    if(Datastream.bad())
    {
        throw std::runtime_error("GetALineFromStream: stream error in get(Buffer)");
    }
    while(Buffer!='\n')
    {
        if(!Datastream.eof())
        {
            LineString += Buffer;
            Datastream.get(Buffer);
        }
        else
        {
            break;
        }
        if(Datastream.bad())
        {
            throw std::runtime_error("GetALineFromStream: stream error in get(Buffer)");
        }
    }
}
// Parse FASTA input from `istr` and register each sequence in `dict`.
// Header lines start with '>'; the name is the text up to the first space.
// When parseSeqOffset is true, a "name:offset" header is split and the
// (1-based) offset after ':' is stored as a 0-based seqOffset.
// Exits the process on a hard stream error.
void readFasta(istream& istr, SQDict& dict, bool parseSeqOffset)
{
    string name;
    vector<char> buffer;   // accumulates sequence characters for the current contig
    while (true) {
        string s;
        getline(istr, s);
        if (istr.bad()) {
            cerr << "error reading SAM file" << endl;
            exit(1);
        }
        // A new header (or EOF) terminates the contig collected so far.
        // NOTE(review): s[0] on an empty line yields '\0' (C++11 semantics),
        // so a blank line is treated as sequence data, not a header.
        if (istr.eof() or s[0] == '>') {
            if (name.length() > 0) {
                // add previous sequence
                long long int seqOffset = 0;
                if (parseSeqOffset) {
                    int j = name.find(':');
                    if (j < (int)name.length()) {
                        // offset in the header is 1-based; store 0-based
                        seqOffset = atoll(&name.c_str()[j + 1]) - 1;
                        name = name.substr(0, j);
                    }
                }
                Contig& c = dict[name];
                // Only fill in the contig the first time this name is seen.
                if (c.name.length() == 0) {
                    c.name = name;
                    c.len = buffer.size();
                    c.idx = dict.size() - 1;
                    c.seq[0] = DNASequence(buffer.begin(), buffer.end());
                    c.seqOffset[0] = seqOffset;
                    cerr << "added contig [" << c.name << "] of length [" << c.len << "]"
                         << " with start offset [" << c.seqOffset[0] << "]" << endl;
                }
            }
            if (istr.eof()) break;
            // Name runs from after '>' to the first space (if any).
            int stop = s.find(" ")-1;
            if (stop > 0) {
                name = s.substr(1,stop);
            } else {
                name = s.substr(1);
            }
            buffer.clear();
        } else {
            // Sequence line: append to the current contig's buffer.
            buffer.insert(buffer.end(), s.begin(), s.end());
        }
    }
}
// Print one line on stdout for every state flag currently set on `istr`
// (good/bad/fail/eof).  Several lines may be printed for one stream.
void printStreamState(const std::istream& istr)
{
    const struct { bool set; const char* label; } flags[] = {
        { istr.good(), "good" },
        { istr.bad(),  "bad"  },
        { istr.fail(), "fail" },
        { istr.eof(),  "eof"  },
    };
    for (const auto& f : flags) {
        if (f.set) {
            std::cout << "stream state is " << f.label << std::endl;
        }
    }
}
//-------------------------------------------------- bool ofBuffer::set(istream & stream){ if(stream.bad()){ clear(); return false; }else{ buffer.clear(); } vector<char> aux_buffer(ioSize); while(stream.good()){ stream.read(&aux_buffer[0], ioSize); buffer.insert(buffer.end(),aux_buffer.begin(),aux_buffer.begin()+stream.gcount()); } buffer.push_back(0); return true; }
// Read one line from `strm` into `line`, maintaining the global line
// counters.  Returns false at end of input; terminates the process (via
// err) on a hard read error.
// Fix: the original returned false whenever eofbit was set, which silently
// dropped a final line lacking a trailing newline (getline sets eofbit
// together with the extracted data in that case).  Now we only stop when
// nothing was extracted (failbit).
inline bool read_one_line(istream& strm, std::string& /*output*/ line)
{
    getline(strm, line);
    // Hard error, or a failure that is not simply end-of-file.
    if (strm.bad() || (strm.fail() && !strm.eof())) {
        err(1,"Error: failed to read from file '%s'", current_file_name.c_str());
    }
    // failbit => nothing was extracted: true end of input.
    if (strm.fail())
        return false;
    ++current_line;
    ++lines_in_current_file;
    return true;
}
// Read drug-resistance trait coefficients from `model` into trait[1].
// Each line holds one or more locus indices followed by a value, separated
// by tabs/spaces.  Also accumulates the wild-type resistance (alternating
// sign by epistatic order) and refreshes trait/fitness of all clones.
// Returns 0 on success, HIVPOP_BADARG on a bad input stream.
int hivpopulation::read_resistance_coefficients(istream &model){
	if (HIVPOP_VERBOSE){
		cerr<<"hivpopulation::read_resistance_coefficients(): read coefficients ";
	}
	if (model.bad()){
		cerr<<"hivpopulation::read_resistance_coefficients(): BAD MODEL STREAM!"<<endl;
		return HIVPOP_BADARG;
	}
	double val, wt_resistance=0;
	vector <int> loci;
	vector<string> strs;
	string line;

	// reset the hypercube
	trait[1].reset();

	// read the stream line by line
	while(!model.eof()){
		strs.clear();
		loci.clear();
		getline(model, line);
		boost::split(strs, line, boost::is_any_of("\t "));
		//cout <<"a "<<line<<" "<<strs.size();
		// Need at least one locus plus the coefficient value.
		if (strs.size()>1){
			// All fields but the last are locus indices.
			for (unsigned int entry=0; entry<strs.size()-1; entry++){
				loci.push_back(atoi(strs[entry].c_str()));
				//cout<<loci.back()<<" "<<strs[entry].c_str()<<" ";
			}
			// Last field is the coefficient value.
			val=atof(strs.back().c_str());
			add_trait_coefficient(val, loci,1);
			// (-1)^order contribution of this coefficient to the wild type.
			wt_resistance+=val*pow(-1.0,(double)loci.size());
			//cout <<loci.size()<<" "<<wt_resistance<<endl;
		}
		//cout<<loci[0]<<" "<<val<<" "<<loci.size()<<endl;
	}
	// Shift the hypercube mean so the wild type has zero resistance.
	trait[1].hypercube_mean=-wt_resistance;

	// update the replication and fitness of all clones
	update_traits();
	update_fitness();

	if (HIVPOP_VERBOSE){
		cerr<<"...done"<<endl;
	}
	return 0;
}
void zipios::ZipOutputStreamTest::entryToFile(const string &ent_name, istream &is, const string &outfile, bool cerr_report) { ofstream ofs( outfile.c_str(), ios::out | ios::binary ) ; ofs << is.rdbuf() ; if ( cerr_report ) { cerr << "writing " << ent_name << " to " << outfile << endl ; cerr << "Stream state: " ; cerr << "good() = " << is.good() << ",\t" ; cerr << "fail() = " << is.fail() << ",\t" ; cerr << "bad() = " << is.bad() << ",\t" ; cerr << "eof() = " << is.eof() << endl << endl; } ofs.close() ; }
// Load a milestone path from `in`: a count n followed by n configurations.
// Builds the path edges via CreateEdgesFromMilestones.
// Returns false if the count cannot be read.
bool MilestonePath::Load(istream& in,CSpace* space)
{
  Assert(space != NULL);
  vector<Config> configs;
  int n = 0;   // initialized: the old code read it unchecked (see below)
  in>>n;
  // Fix: this used in.bad().  A formatting failure (non-numeric input) only
  // sets failbit, so the old check passed and an uninitialized n reached
  // Assert/reserve.  !in catches failbit and badbit alike.
  if(!in) return false;
  Assert(n > 0);
  configs.reserve(n);
  Config temp;
  for(int i=0;i<n;i++) {
    in>>temp;
    configs.push_back(temp);
  }
  CreateEdgesFromMilestones(space,configs);
  return true;
}
// Parse an instrumentation schema from `in`.  Each non-blank line has the
// form:  fnPattern;{m1,m2};{m3};...
// Whitespace is stripped, method names are upper-cased, and each line yields
// one (pattern, set of method-sets) entry.  Calls report_fatal_error on any
// malformed line or a hard stream error.
static vector<pair<string, set<set<string> > > > readScheme(istream& in){
    vector<string> lines;
    string s;
    while(getline(in, s)){
        // Strip all whitespace so formatting in the schema file is irrelevant.
        s.erase(remove_if(s.begin(), s.end(), ::isspace), s.end());
        if(!s.empty())
            lines.push_back(s);
        if(in.fail() || in.eof())
            break;
    }
    if(in.bad())
        report_fatal_error("error encountered reading schema input");

    vector<pair<string, set<set<string> > > > result;
    for(vector<string>::iterator i = lines.begin(), e = lines.end(); i != e; ++i){
        vector<string> entries = split(*i, ';');
        if(entries.size() < 2)
            report_fatal_error("invalid formatting for line '" + *i + "' in instrumentation schema");
        string fnPattern = entries[0];
        set<set<string> > schemes;
        for(vector<string>::iterator j = ++entries.begin(), je = entries.end(); j != je; ++j){
            string scheme = *j;
            transform(scheme.begin(), scheme.end(), scheme.begin(), ::toupper);
            // Fix: guard against empty entries (e.g. a trailing ';').  The old
            // code indexed scheme[scheme.length()-1], which underflows for "".
            if(scheme.length() < 2 || scheme[0] != '{' || scheme[scheme.length()-1] != '}')
                report_fatal_error("invalid formatting for entry '" + scheme + "' in instrumentation schema", false);
            // Drop the surrounding braces, then split into method names.
            scheme.erase(0, 1);
            scheme.erase(scheme.length()-1, 1);
            const vector<string> methods = split(scheme, ',');
            set<string> methodsSet;
            for(vector<string>::const_iterator k = methods.begin(), ke = methods.end(); k != ke; ++k){
                if(!k->empty())
                    methodsSet.insert(*k);
            }
            schemes.insert(methodsSet);
        }
        result.push_back(make_pair(fnPattern, schemes));
    }
    return(result);
}
// Load a piecewise-linear path: a sequence of (time, milestone) pairs read
// until extraction fails.  Running out of data is normal termination; only
// a hard stream error (badbit) is reported as failure.
bool LinearPathResource::Load(istream& in)
{
  // Discard any previously loaded trajectory.
  times.clear();
  milestones.clear();
  Real t;
  Vector x;
  for(;;) {
    in >> t >> x;
    if(!in) break;
    times.push_back(t);
    milestones.push_back(x);
  }
  return !in.bad();
}
// Read whitespace-separated ints from `ist` into v until a non-numeric token
// is hit.  If that token is `terminator`, it is consumed and the function
// returns normally; otherwise the character is pushed back and the stream is
// left in the fail state for the caller to inspect.
// Fix: in the source as given, the function's closing braces had been
// swallowed by the trailing line comment; reformatted with correct bracing.
void fillVector(vector<int> &v, istream &ist, char terminator)
{
    int x;
    while(ist >> x)
        v.push_back(x);

    if(ist.bad())
        error("Some unusual error occurred, stream is in bad state.");

    if(ist.eof())
        return;

    if(ist.fail()) {
        ist.clear();   // clear stream state so we can examine the bad token
        char c;
        ist >> c;
        if(c == terminator) {
            cout << "found terminator\n";
            return;
        }
        ist.unget();
        ist.clear(ios_base::failbit);  // set the state to fail
    }
}
// Read the archive's central directory from _zipfile into _entries.
// First locates the end-of-central-directory (eocd) record, then reads every
// central-directory entry and cross-checks the archive layout.  Throws
// IOException/FCollException on any structural or I/O problem; returns true
// otherwise.
bool ZipFile::readCentralDirectory ( istream &_zipfile ) {
    // Find and read eocd.
    if ( ! readEndOfCentralDirectory( _zipfile ) )
        throw FCollException( "Unable to find zip structure: End-of-central-directory" ) ;

    // Position read pointer to start of first entry in central dir.
    _vs.vseekg( _zipfile, _eocd.offset(), ios::beg ) ;

    int entry_num = 0 ;
    // Giving the default argument in the next line to keep Visual C++ quiet
    // NOTE(review): entries are owned via raw pointers stored in _entries;
    // cleanup is presumably handled elsewhere in the class.
    _entries.resize ( _eocd.totalCount(), 0 ) ;
    while ( ( entry_num < _eocd.totalCount() ) ) {
        ZipCDirEntry *ent = new ZipCDirEntry ;
        _entries[ entry_num ] = ent ;
        _zipfile >> *ent ;
        // Distinguish hard I/O errors, parse failures and truncation.
        if ( ! _zipfile ) {
            if ( _zipfile.bad() )
                throw IOException( "Error reading zip file while reading zip file central directory" ) ;
            else if ( _zipfile.fail() )
                throw FCollException( "Zip file consistency problem. Failure while reading zip file central directory" ) ;
            else if ( _zipfile.eof() )
                throw IOException( "Premature end of file while reading zip file central directory" ) ;
        }
        ++entry_num ;
    }

    // Consistency check. eocd should start here
    int pos = _vs.vtellg( _zipfile ) ;
    _vs.vseekg( _zipfile, 0, ios::end ) ;
    int remaining = static_cast< int >( _vs.vtellg( _zipfile ) ) - pos ;
    if ( remaining != _eocd.eocdOffSetFromEnd() )
        throw FCollException( "Zip file consistency problem. Zip file data fields are inconsistent with zip file layout" ) ;

    // Consistency check 2, are local headers consistent with
    // cd headers
    if ( ! confirmLocalHeaders( _zipfile ) )
        throw FCollException( "Zip file consistency problem. Zip file data fields are inconsistent with zip file layout" ) ;

    return true ;
}
//read coefficients from a stream. This assumes a stream that delivers 2^L values in //the canocical bit order int hypercube_lowd::read_func(istream &in) { int i; if (in.bad()) { cerr <<"hypercube_lowd::read_func: bad stream\n"; return HC_BADARG; } i=0; while(in.good() && i<(1<<dim)) { in >>func[i]; i++; } if (i<(1<<dim)) { cerr <<"hypercube_lowd::read_func: file end reached after "<<i<<" values!\n"; return HC_BADARG; } return 0; }
//-------------------------------------------------- bool ofBuffer::set(istream & stream){ clear(); if( stream.bad() ) return false; char aux_buffer[1024]; std::streamsize size = 0; stream.read(aux_buffer, 1024); std::streamsize n = stream.gcount(); while( n > 0 ){ // we resize to size+1 initialized to 0 to have a 0 at the end for strings buffer.resize(size+n+1,0); memcpy(&(buffer[0])+size,aux_buffer,n); size += n; if( stream ){ stream.read(aux_buffer, 1024); n = stream.gcount(); } else n = 0; } return true; }
// Extract one value of type T from `is` into `n`.
// Succeeds only when the value parses cleanly AND is immediately followed by
// end-of-stream or plain whitespace (space/tab/newline); any other trailing
// character marks the token as malformed and the read is rejected.
// NOTE(review): the template header declaring T is not visible in this chunk
// — presumably `template<typename T>` precedes this definition.
bool fcnn::internal::read(istream &is, T &n)
{
    char c;
    T x;
    is >> x;
    if (is.fail() || is.bad())
        return false;
    else {
        if (is.eof()) {
            n = x;
            return true;
        }
        // Peek at the delimiter without consuming it.
        c = is.peek();
        if ((c == ' ') || (c == '\t') || (c == '\n')) {
            n = x;
            return true;
        }
    }
    return false;
}
// Translate `input` token by token onto `output`.  Alphanumeric runs are
// passed through translateWord; any other single character is copied as-is.
// Fixes: the original recursed once per token (stack overflow on large
// input) and wrote past word[255] for tokens of 255+ characters.
void translateStream(istream& input, ostream& output){
    char word[255];
    char translated_word[255];
    for (;;){
        int i = 0;
        word[0] = input.get();
        if (input.bad() || input.eof())
            return;
        if (!isalpha((unsigned char)word[0]) && !isdigit((unsigned char)word[0])){
            // Non-word character: emit it unchanged.
            translated_word[0] = word[0];
            translated_word[1] = '\0';
        } else {
            // Collect the alphanumeric run; i<254 leaves room for the '\0'
            // and prevents the overflow the old version had.  An over-long
            // token is simply translated in slices.
            for (i = 1; i < 254 && (isalpha((unsigned char)word[i-1]) || isdigit((unsigned char)word[i-1])); i++){
                word[i] = input.get();
            }
            input.unget();           // push back the delimiter
            word[i-1] = '\0';
            translateWord(word, translated_word);
        }
        output << translated_word;
    }
}
// Compress all of `in` with zlib deflate (default level) onto `out`.
// Returns the number of compressed bytes written, or 0 on any error
// (init failure, read error, or write error).  Clears `in`'s state flags
// on success so the stream can be reused.
uint64_t PackUtils::Pack(istream& in, ostream& out)
{
    uint64_t result = 0;
    char readBuf[PackUtils::CHUNK];
    char writeBuf[PackUtils::CHUNK];
    int ret, flush;
    uint64_t have;
    z_stream strm;

    /* allocate deflate state */
    strm.zalloc = Z_NULL;
    strm.zfree = Z_NULL;
    strm.opaque = Z_NULL;
    ret = deflateInit(&strm, Z_DEFAULT_COMPRESSION);
    // Couldn't init deflate
    if (ret != Z_OK) {
        return 0;
    }

    do {
        // A short read (last chunk) leaves the actual byte count in gcount().
        strm.avail_in = PackUtils::CHUNK;
        if(!in.read(readBuf, PackUtils::CHUNK)) {
            strm.avail_in = (uInt)in.gcount();
        }
        if(in.bad()) {
            (void)deflateEnd(&strm);
            return 0;
        }
        // Z_FINISH on the final chunk tells deflate to flush everything.
        flush = (in.eof() || strm.avail_in == 0) ? Z_FINISH : Z_NO_FLUSH;
        strm.next_in = (Bytef*) readBuf;

        /* run deflate() on input until output buffer not full, finish
           compression if all of source has been read in */
        do {
            strm.avail_out = CHUNK;
            strm.next_out = (Bytef*) writeBuf;
            ret = deflate(&strm, flush); /* no bad return value */
            have = CHUNK - strm.avail_out;
            result += have;
            out.write(writeBuf, have);
            if (out.bad()) {
                (void) deflateEnd(&strm);
                return 0;
            }
        } while (strm.avail_out == 0);
        /* done when last data in file processed */
    } while (flush != Z_FINISH);

    /* clean up and return */
    (void)deflateEnd(&strm);
    // Reset eof/fail flags set while draining the input stream.
    in.clear();
    return result;
}
// Parse the whole configuration from `input` into this object.
// Handles: CR/LF stripping, backslash line continuations, '#' comments,
// "[section]" headers (with optional tilde expansion of the section name),
// and "name = value" assignments.  Sets status = STATUS_ERROR on a hard
// stream error.  Comment/blank lines are preserved in m_order so the file
// can be rewritten faithfully.
void ConfSimple::parseinput(istream &input)
{
    string submapkey;          // current [section]; empty means top level
    char cline[LL];
    bool appending = false;    // true while accumulating a backslash-continued line
    string line;
    bool eof = false;

    for (;;) {
        cline[0] = 0;
        input.getline(cline, LL-1);
        LOGDEB((stderr, "Parse:line: [%s] status %d\n", cline, int(status)));
        if (!input.good()) {
            if (input.bad()) {
                LOGDEB((stderr, "Parse: input.bad()\n"));
                status = STATUS_ERROR;
                return;
            }
            LOGDEB((stderr, "Parse: eof\n"));
            // Must be eof ? But maybe we have a partial line which
            // must be processed. This happens if the last line before
            // eof ends with a backslash, or there is no final \n
            eof = true;
        }

        // Strip trailing CR/LF.
        {
            int ll = strlen(cline);
            while (ll > 0 && (cline[ll-1] == '\n' || cline[ll-1] == '\r')) {
                cline[ll-1] = 0;
                ll--;
            }
        }

        if (appending)
            line += cline;
        else
            line = cline;

        // Note that we trim whitespace before checking for backslash-eol
        // This avoids invisible whitespace problems.
        trimstring(line);
        if (line.empty() || line.at(0) == '#') {
            if (eof)
                break;
            m_order.push_back(ConfLine(ConfLine::CFL_COMMENT, line));
            continue;
        }

        // Backslash continuation: keep accumulating into `line`.
        if (line[line.length() - 1] == '\\') {
            line.erase(line.length() - 1);
            appending = true;
            continue;
        }
        appending = false;

        // Section header: "[name]".
        if (line[0] == '[') {
            trimstring(line, "[]");
            if (dotildexpand)
                submapkey = path_tildexpand(line);
            else
                submapkey = line;
            // No need for adding sk to order, will be done with first
            // variable insert. Also means that empty sections are
            // expendable (won't be output when rewriting).
            // Another option would be to add the subsec to m_order here
            // and not do it inside i_set() if init is true
            continue;
        }

        // Look for first equal sign
        string::size_type eqpos = line.find("=");
        if (eqpos == string::npos) {
            // Not an assignment: keep as a comment line for rewriting.
            m_order.push_back(ConfLine(ConfLine::CFL_COMMENT, line));
            continue;
        }

        // Compute name and value, trim white space
        string nm, val;
        nm = line.substr(0, eqpos);
        trimstring(nm);
        val = line.substr(eqpos+1, string::npos);
        trimstring(val);

        if (nm.length() == 0) {
            m_order.push_back(ConfLine(ConfLine::CFL_COMMENT, line));
            continue;
        }
        i_set(nm, val, submapkey, true);
        if (eof)
            break;
    }
}
// Decompress `size` bytes of zlib-deflated data from `in` onto `out`.
// Returns the number of decompressed bytes written, or 0 on any error
// (init failure, read/write error, or corrupt compressed data).
uint64_t PackUtils::Unpack(istream& in, uint64_t size, ostream& out)
{
    uint64_t result = 0;
    int ret;
    uint64_t remaining = size;    // compressed bytes still to be consumed
    uint64_t have;
    z_stream strm;
    char readBuf[PackUtils::CHUNK];
    char writeBuf[PackUtils::CHUNK];

    /* allocate inflate state */
    strm.zalloc = Z_NULL;
    strm.zfree = Z_NULL;
    strm.opaque = Z_NULL;
    strm.avail_in = 0;
    strm.next_in = Z_NULL;
    ret = inflateInit(&strm);
    if (ret != Z_OK)
        return 0;

    /* decompress until deflate stream ends or end of file */
    do {
        // Never read past the `size` bytes that belong to this stream.
        strm.avail_in = PackUtils::CHUNK;
        if(remaining < PackUtils::CHUNK) {
            strm.avail_in = (uInt)remaining;
        }
        remaining -= strm.avail_in;
        // A short read leaves the actual byte count in gcount().
        if(!in.read(readBuf, strm.avail_in)) {
            strm.avail_in = (uInt)in.gcount();
        }
        if (in.bad()) {
            (void)inflateEnd(&strm);
            return 0;
        }
        if (strm.avail_in == 0)
            break;
        strm.next_in = (Bytef*) readBuf;

        /* run inflate() on input until output buffer not full */
        do {
            strm.avail_out = CHUNK;
            strm.next_out = (Bytef*) writeBuf;
            ret = inflate(&strm, Z_NO_FLUSH);
            switch (ret) {
            case Z_NEED_DICT:
                ret = Z_DATA_ERROR; /* and fall through */
            case Z_DATA_ERROR:
            case Z_MEM_ERROR:
                (void)inflateEnd(&strm);
                return 0;
            }
            have = CHUNK - strm.avail_out;
            result += have;
            out.write(writeBuf, have);
            if (out.bad()) {
                (void)inflateEnd(&strm);
                return 0;
            }
        } while (strm.avail_out == 0);

        /* done when inflate() says it's done */
    } while (ret != Z_STREAM_END);

    /* clean up and return */
    (void)inflateEnd(&strm);
    return ret == Z_STREAM_END ? result : 0;
}
// Read a g2/IGES model from `is` and wrap every spline or bounded surface
// found as an ftSurface appended to `faces`.  Stray spline curves outside a
// group produce a warning and are skipped.  Returns an ftMessage carrying
// error/warning status.
ftMessage readg2(istream& is, vector<shared_ptr<ftSurface> >& faces)
//===========================================================================
{
    ftMessage status;
    if (is.bad())
    {
        status.setError(FT_BAD_INPUT_FILE);
        return status;
    }

    IGESconverter conv;
    try
    {
        conv.readgo(is);
    }
    catch (...)
    {
        status.setError(FT_ERROR_IN_READ_IGES);
        return status;
    }

    //      std::ofstream outfile("debug.out");
    //      conv.writedisp(outfile);

    vector<shared_ptr<GeomObject> > gogeom = conv.getGoGeom();
    int nmbgeom = (int)gogeom.size();
    faces.reserve(nmbgeom); // May be too much, but not really important
    int face_count = 0;

    // Remaining geometry.
    for (int i=0; i<nmbgeom; i++)
    {
        if (gogeom[i]->instanceType() == Class_SplineCurve)
        {
            if (conv.getGroup().size() == 0)
            {
                // One mesh surface expected.
            }
            else
                // Not expected. Ignore the current curve.
                status.addWarning(FT_UNEXPECTED_INPUT_OBJECT_IGNORED);
        }
        else if (gogeom[i]->instanceType() == Class_SplineSurface ||
                 gogeom[i]->instanceType() == Class_BoundedSurface)
        {
            // Wrap the surface and hand it an increasing face index.
            shared_ptr<GeomObject> lg = gogeom[i];
            shared_ptr<ParamSurface> gosf =
                dynamic_pointer_cast<ParamSurface, GeomObject>(lg);
            shared_ptr<ftSurface> ftsf(new ftSurface(gosf, face_count++));
            faces.push_back(ftsf);
        }
    }

    return status;
}
// Load stars from a binary star database stream: validated header, version
// word (must be 0x0100), a star count, then fixed-size little-endian records
// (catalog#, x, y, z, absMag, spectralType).  Appends to the existing star
// set and rebuilds the catalog-number lookup index.
// Fixes: the header buffer was allocated with new[] and leaked on the
// early-return header mismatch (now a vector); the per-star loop broke only
// on bad(), but a truncated read sets failbit — not badbit — so garbage
// records could be added (now checks fail()).
bool StarDatabase::loadBinary(istream& in)
{
    uint32 nStarsInFile = 0;

    // Verify that the star database file has a correct header
    {
        int headerLength = strlen(FILE_HEADER);
        vector<char> header(headerLength);
        in.read(&header[0], headerLength);
        if (strncmp(&header[0], FILE_HEADER, headerLength))
            return false;
    }

    // Verify the version
    {
        uint16 version;
        in.read((char*) &version, sizeof version);
        LE_TO_CPU_INT16(version, version);
        if (version != 0x0100)
            return false;
    }

    // Read the star count
    in.read((char *) &nStarsInFile, sizeof nStarsInFile);
    LE_TO_CPU_INT32(nStarsInFile, nStarsInFile);
    if (!in.good())
        return false;

    unsigned int totalStars = nStars + nStarsInFile;

    while (((unsigned int) nStars) < totalStars)
    {
        uint32 catNo = 0;
        float x = 0.0f, y = 0.0f, z = 0.0f;
        int16 absMag;
        uint16 spectralType;

        in.read((char *) &catNo, sizeof catNo);
        LE_TO_CPU_INT32(catNo, catNo);
        in.read((char *) &x, sizeof x);
        LE_TO_CPU_FLOAT(x, x);
        in.read((char *) &y, sizeof y);
        LE_TO_CPU_FLOAT(y, y);
        in.read((char *) &z, sizeof z);
        LE_TO_CPU_FLOAT(z, z);
        in.read((char *) &absMag, sizeof absMag);
        LE_TO_CPU_INT16(absMag, absMag);
        in.read((char *) &spectralType, sizeof spectralType);
        LE_TO_CPU_INT16(spectralType, spectralType);
        // fail() (not just bad()) so a truncated record stops the loop
        // before a garbage star is constructed.
        if (in.fail())
            break;

        Star star;
        star.setPosition(x, y, z);
        star.setAbsoluteMagnitude((float) absMag / 256.0f);

        StarDetails* details = NULL;
        StellarClass sc;
        if (sc.unpack(spectralType))
            details = StarDetails::GetStarDetails(sc);

        if (details == NULL)
        {
            cerr << _("Bad spectral type in star database, star #") << nStars << "\n";
            return false;
        }

        star.setDetails(details);
        star.setCatalogNumber(catNo);
        unsortedStars.add(star);

        nStars++;
    }

    if (in.bad())
        return false;

    DPRINTF(0, "StarDatabase::read: nStars = %d\n", nStarsInFile);
    clog << nStars << _(" stars in binary database\n");

    // Create the temporary list of stars sorted by catalog number; this
    // will be used to lookup stars during file loading.  After loading is
    // complete, the stars are sorted into an octree and this list gets
    // replaced.
    if (unsortedStars.size() > 0)
    {
        binFileStarCount = unsortedStars.size();
        binFileCatalogNumberIndex = new Star*[binFileStarCount];
        for (unsigned int i = 0; i < binFileStarCount; i++)
        {
            binFileCatalogNumberIndex[i] = &unsortedStars[i];
        }
        sort(binFileCatalogNumberIndex, binFileCatalogNumberIndex + binFileStarCount,
             PtrCatalogNumberOrderingPredicate());
    }

    return true;
}