// Read a header of a known type from the stream.
// On failure the stream is rewound to its starting position and its
// original state flags are restored, so the caller can retry parsing.
bool VersionedFileHeader::read_known( VFH_Type i_type, std::istream &io_istream )
{
    // save stream state and position so we can roll back on failure
    streampos pos = io_istream.tellg();
    ios_base::iostate state = io_istream.rdstate();
    if ( !read_raw(io_istream) ||
         m_magicNumber!=(isMSBF()?g_versioned_headers_list[i_type].magic_number_MSBF:g_versioned_headers_list[i_type].magic_number_LSBF) )
    {
        // reading failed: reset members to their defaults
        m_magicNumber = m_version = 0;
        m_isMSBF = ( MSBF_PROCESSOR()?1:0 );
        // Clear error flags first, otherwise seekg() is a no-op on a
        // failed stream; then restore the exact original flags.
        // (The original used setstate(), which only ORs flags in and
        // therefore could never reset the error bits.)
        io_istream.clear();
        io_istream.seekg(pos);
        io_istream.clear(state);
        return false;
    }
    return true;
}
// Rewind the stream and render its bytes as colon-separated lowercase
// hex values (e.g. "de:ad:be:ef:").
std::string PayloadConfidentialBlockTest::getHex(std::istream &stream)
{
    stream.clear();
    stream.seekg(0);
    std::stringstream out;
    for (unsigned int byte = stream.get(); !stream.eof(); byte = stream.get()) {
        out << std::hex << byte << ":";
    }
    out << std::flush;
    return out.str();
}
bool ossim_gpkg::checkApplicationId(std::istream& in)
{
   //---
   // Check the application_id:
   // Requirement 2: Every GeoPackage must contain 0x47503130 ("GP10" in ASCII)
   // in the application id field (bytes 68-71) of the SQLite database header
   // to indicate a GeoPackage version 1.0 file.
   //---
   bool result = false;
   // Zero-initialize so a failed/short read can never match by accident
   // (the original compared uninitialized bytes on failure).
   char APP_ID[4] = { 0, 0, 0, 0 };
   in.seekg( 68, std::ios_base::beg );
   in.read(APP_ID, 4);
   // Only trust the bytes if the seek and read actually succeeded.
   if ( in.good() &&
        (APP_ID[0] == 'G') && (APP_ID[1] == 'P') &&
        (APP_ID[2] == '1') && (APP_ID[3] == '0') )
   {
      result = true;
   }
   return result;
}
///extract a formatted string from the output, ignoring comments string ReadFromStream(std::istream & s) { if (!s) return ""; std::streampos curpos = s.tellg(); assert(curpos >= 0); string str; s >> str; if (!s || str.empty()) return ""; if (str[0] == '#') { s.seekg(curpos); std::getline(s,str); return ReadFromStream(s); } return str; }
// Deserialize an octree from a binary stream. Supports both the current
// headered format (first line matches binaryFileHeader) and the legacy
// header-less format, for which the stream is rewound and re-parsed.
// Returns false if no valid header is found, the header cannot be read,
// or the node count does not match the header's declared size.
bool AbstractOccupancyOcTree::readBinary(std::istream &s) {
    if (!s.good()){
        OCTOMAP_WARNING_STR("Input filestream not \"good\" in OcTree::readBinary");
    }
    // check if first line valid:
    std::string line;
    // remember where the header starts so the legacy path can rewind
    std::istream::pos_type streampos = s.tellg();
    std::getline(s, line);
    unsigned size;
    double res;
    if (line.compare(0,AbstractOccupancyOcTree::binaryFileHeader.length(), AbstractOccupancyOcTree::binaryFileHeader) ==0){
        std::string id;
        if (!AbstractOcTree::readHeader(s, id, size, res))
            return false;
        OCTOMAP_DEBUG_STR("Reading binary octree type "<< id);
    }
    else{
        // try to read old binary format:
        s.clear(); // clear eofbit of istream
        s.seekg(streampos);
        if (readBinaryLegacyHeader(s, size, res)){
            OCTOMAP_WARNING_STR("You are using an outdated binary tree file format.");
            OCTOMAP_WARNING_STR("Please convert your .bt files with convert_octree.");
        }
        else {
            OCTOMAP_ERROR_STR("First line of OcTree file header does not start with \""<< AbstractOccupancyOcTree::binaryFileHeader<<"\"");
            return false;
        }
    }
    // otherwise: values are valid, stream is now at binary data!
    // discard any previous tree contents before loading the new one
    this->clear();
    this->setResolution(res);
    this->readBinaryData(s);

    // sanity check: header-declared node count must match what was read
    if (size != this->size()){
        OCTOMAP_ERROR("Tree size mismatch: # read nodes (%zu) != # expected nodes (%d)\n",this->size(), size);
        return false;
    }

    return true;
}
void ModPhpHandler::inChild(int *p, const char *bin, const std::string& uri, zia::api::http::ITransaction & transac, std::istream& is) { std::string argsstr, pipeSize, queryString, scriptFilename; if (uri.find("?") != std::string::npos) argsstr = uri.substr(uri.find("?") + 1); std::cout << "argsstr = " << argsstr << std::endl; if (dup2(p[1], 1) == -1) std::cerr << "dup2 failed in child" << std::endl; if (dup2(p[0], 0) == -1) std::cerr << "dup2 failed in child" << std::endl; char* tab[6]; std::string lengh("CONTENT_LENGTH="); is.seekg(0, std::ios::end); int is_size = is.tellg(); tab[0] = const_cast<char*>((lengh + inttostr(is_size)).data()); tab[1] = "REQUEST_METHOD=POST"; tab[2] = "CONTENT_TYPE=application/x-www-form-urlencoded"; queryString = "QUERY_STRING=" + argsstr; tab[3] = const_cast<char*>(queryString.data()); scriptFilename = "SCRIPT_FILENAME=" + transac.getRequest().getPath(); tab[4] = const_cast<char*>(scriptFilename.data()); tab[5] = NULL; char* arg[2]; arg[0] = const_cast<char*>(bin); arg[1] = NULL; if (execve(arg[0], arg, tab) == -1) { std::cerr << "Execvp failed " << std::endl; perror("execve"); } }
// Locate the "skin" block (offset relative to the current stream
// position, per the header) and, if present, inflate it into mSkinInfo.
// Returns true only when the block exists and decompresses successfully.
bool LLModel::loadSkinInfo(LLSD& header, std::istream &is)
{
    const S32 offset = header["skin"]["offset"].asInteger();
    const S32 size = header["skin"]["size"].asInteger();

    if (offset < 0 || size <= 0)
    {
        return false;
    }

    is.seekg(offset, std::ios_base::cur);

    LLSD skin_data;
    if (!unzip_llsd(skin_data, is, size))
    {
        return false;
    }

    mSkinInfo.fromLLSD(skin_data);
    return true;
}
// Locate the "physics_convex" block (offset relative to the current
// stream position, per the header) and, when present and this model is
// not a submodel, load it into mPhysics and refresh the hull centers.
// Always returns true.
bool LLModel::loadDecomposition(LLSD& header, std::istream& is)
{
    const S32 offset = header["physics_convex"]["offset"].asInteger();
    const S32 size = header["physics_convex"]["size"].asInteger();

    if (offset < 0 || size <= 0 || mSubmodelID)
    {
        return true;
    }

    is.seekg(offset, std::ios_base::cur);

    LLSD data;
    if (unzip_llsd(data, is, size))
    {
        mPhysics.fromLLSD(data);
        updateHullCenters();
    }
    return true;
}
// Compute the CRC-32 of an entire stream, reading it in chunks through
// `buff`, and leave the stream rewound to the beginning.
void getFileCrc(std::istream& input_stream, std::vector<char>& buff, unsigned long& result_crc)
{
    unsigned long calculate_crc = 0;
    unsigned long size_read = 0;
    unsigned long total_read = 0;
    do {
        input_stream.read(buff.data(), buff.size());
        size_read = (unsigned long)input_stream.gcount();
        if (size_read > 0)
            calculate_crc = crc32(calculate_crc, (const unsigned char*)buff.data(), size_read);
        total_read += size_read;
    } while (size_read > 0);
    // The read loop always terminates with eofbit/failbit set; those
    // must be cleared first or the rewind below is silently ignored.
    input_stream.clear();
    input_stream.seekg(0);
    result_crc = calculate_crc;
}
/* * Check for DAWG in first 4 to identify as special binary format, * otherwise assume ASCII, one word per line */ void DawgLexicon::addWordsFromFile(std::istream& input) { char firstFour[4], expected[] = "DAWG"; if (input.fail()) { error("DawgLexicon::addWordsFromFile: Couldn't read input"); } input.read(firstFour, 4); if (strncmp(firstFour, expected, 4) == 0) { if (otherWords.size() != 0) { error("DawgLexicon::addWordsFromFile: Binary files require an empty lexicon"); } readBinaryFile(input); } else { // plain text file input.seekg(0); std::string line; while (getline(input, line)) { add(line); } } }
bool VersionedFileHeader::read_unknown( std::istream &io_istream, VFH_Type &o_id ) { // save stream state and position streampos pos = io_istream.tellg(); ios_base::iostate state = io_istream.rdstate(); bool isMSBF_; // from magic number if ( !read_raw( io_istream ) || !typeFromMagicNumber(m_magicNumber, o_id, isMSBF_) || isMSBF_!=isMSBF() ) { // reading failed, rewind to starting point and reset error flags m_magicNumber = m_version = 0; m_isMSBF = ( MSBF_PROCESSOR()?1:0 ); io_istream.seekg( pos ); io_istream.setstate( state ); return false; } return true; }
// Load a named-entry BSA archive. The footer lives at the end of the
// stream and holds one 18-byte record per entry: a 12-byte name plus NUL,
// a 2-byte compression flag, and a 4-byte size. Entry data itself starts
// at the current stream position; offsets are accumulated from there.
void BsaArchive::loadNamed(size_t count, std::istream& stream)
{
    std::vector<std::string> names; names.reserve(count);
    std::vector<Entry> entries; entries.reserve(count);

    // data section begins here; entry offsets are computed cumulatively
    std::streamsize base = stream.tellg();
    // footer is `count` 18-byte records counted back from the stream end
    if(!stream.seekg(std::streampos(count) * -18, std::ios_base::end))
        throw std::runtime_error("Failed to seek to archive footer ("+std::to_string(count)+" entries)");
    for(size_t i = 0;i < count;++i)
    {
        std::array<char,13> name;
        stream.read(name.data(), name.size()-1);
        name.back() = '\0'; // Ensure null termination
        // normalize Windows-style separators to forward slashes
        std::replace(name.begin(), name.end(), '\\', '/');
        names.push_back(std::string(name.data()));
        int iscompressed = read_le16(stream);
        if(iscompressed != 0)
            throw std::runtime_error("Compressed entries not supported");
        Entry entry;
        // each entry starts where the previous one ended
        entry.mStart = ((i == 0) ? base : entries[i-1].mEnd);
        entry.mEnd = entry.mStart + read_le32(stream);
        entries.push_back(entry);
    }
    if(!stream.good())
        throw std::runtime_error("Failed reading archive footer");

    // merge the names into the sorted lookup table, skipping duplicates
    for(const std::string &name : names)
    {
        auto iter = std::lower_bound(mLookupName.begin(), mLookupName.end(), name);
        if(iter == mLookupName.end() || *iter != name)
            mLookupName.insert(iter, name);
    }
    // place each entry at the index its name occupies in the lookup table
    mEntries.resize(mLookupName.size());
    for(size_t i = 0;i < count;++i)
    {
        auto iter = std::find(mLookupName.cbegin(), mLookupName.cend(), names[i]);
        mEntries[std::distance(mLookupName.cbegin(), iter)] = entries[i];
    }
}
/** * Initializes the hash function, with a stream value. * * @param stream The stream value to be used for computation. */ void HashFunction::init(std::istream &stream) { // resets the current hash value this->reset(); // allocates the file buffer unsigned char streamBuffer[HASH_STREAM_BUFFER_SIZE]; // initializes the read size unsigned int readSize = 0; // retrieves the initial position std::streamoff initialPosition = stream.tellg(); // iterates continuously while(1) { // reads the buffer stream.read((char *) streamBuffer, HASH_STREAM_BUFFER_SIZE); // retrieves the read size readSize = stream.gcount(); // updates the hash value with the // stream buffer value this->update(streamBuffer, readSize); // in case the end of file was reached if(stream.eof()) { // breaks the cycle break; } }; // finalizes the hash value this->finalize(); // clears the error bits stream.clear(); // seeks the the initial position stream.seekg(initialPosition, std::fstream::beg); }
bool NodeGraph::tryReadClipboard(const QPointF& pos, std::istream& ss) { // Check if this is a regular clipboard // This will also check if this is a single node try { SERIALIZATION_NAMESPACE::NodeClipBoard cb; SERIALIZATION_NAMESPACE::read(std::string(), ss, &cb); std::list<std::pair<NodePtr, SERIALIZATION_NAMESPACE::NodeSerializationPtr > > nodesToPaste; for (SERIALIZATION_NAMESPACE::NodeSerializationList::const_iterator it = cb.nodes.begin(); it != cb.nodes.end(); ++it) { nodesToPaste.push_back(std::make_pair(NodePtr(), *it)); } _imp->pasteNodesInternal(nodesToPaste, pos, NodeGraphPrivate::PasteNodesFlags(NodeGraphPrivate::ePasteNodesFlagRelativeToCentroid | NodeGraphPrivate::ePasteNodesFlagUseUndoCommand)); } catch (...) { // Check if this was copy/pasted from a project directly try { ss.seekg(0); SERIALIZATION_NAMESPACE::ProjectSerialization isProject; SERIALIZATION_NAMESPACE::read(std::string(), ss, &isProject); std::list<std::pair<NodePtr, SERIALIZATION_NAMESPACE::NodeSerializationPtr > > nodesToPaste; for (SERIALIZATION_NAMESPACE::NodeSerializationList::const_iterator it = isProject._nodes.begin(); it != isProject._nodes.end(); ++it) { nodesToPaste.push_back(std::make_pair(NodePtr(), *it)); } _imp->pasteNodesInternal(nodesToPaste, pos, NodeGraphPrivate::PasteNodesFlags(NodeGraphPrivate::ePasteNodesFlagRelativeToCentroid | NodeGraphPrivate::ePasteNodesFlagUseUndoCommand)); } catch (...) { return false; } } return true; }
// Open a word-pair file (pairs of uint64_t words: key followed by value)
// for sequential reading, positioned at the first pair whose key is
// >= offset. Keys are assumed to be in ascending order, as required by
// the std::lower_bound binary search below.
static libmaus2::aio::SynchronousGenericInput<uint64_t>::unique_ptr_type openWordPairFile( std::istream & CIS, uint64_t const n, uint64_t const offset )
{
    // accessor exposing the n keys in the stream as a random-access sequence
    libmaus2::suffixsort::GapArrayByteOverflowKeyAccessor acc(CIS);
    libmaus2::util::ConstIterator<libmaus2::suffixsort::GapArrayByteOverflowKeyAccessor,uint64_t> ita(&acc,0);
    libmaus2::util::ConstIterator<libmaus2::suffixsort::GapArrayByteOverflowKeyAccessor,uint64_t> ite(&acc,n);
    // binary search for the first key >= offset
    libmaus2::util::ConstIterator<libmaus2::suffixsort::GapArrayByteOverflowKeyAccessor,uint64_t> itc = std::lower_bound(ita,ite,offset);
    // el = number of pairs skipped, restel = number of pairs remaining
    uint64_t const el = itc-ita;
    uint64_t const restel = n - el;
    // position the stream at the first remaining pair (2 words per pair);
    // clear() first in case the accessor left error flags on the stream
    CIS.clear();
    CIS.seekg( el * 2 * sizeof(uint64_t), std::ios::beg );
    // buffered reader over the remaining 2*restel words
    libmaus2::aio::SynchronousGenericInput<uint64_t>::unique_ptr_type tSGI( new libmaus2::aio::SynchronousGenericInput<uint64_t>(CIS,1024,2*restel) );
    return UNIQUE_PTR_MOVE(tSGI);
}
// 压缩功能,统计符号频数,并将其保存到压缩文件,然后构造哈夫曼树,进行压缩,写入文件 void huffman_file_compressor::compress(std::istream& in, std::ostream& out) { byte_freq_map table; table.load_from_file(in); table.save_to_file(out); huffman_tree** forest = table.to_simple_huffman_forest(); huffman_tree* tree = huffman_tree::biuld_huffman_tree(forest, table.size()); in.clear(); in.seekg(0, std::ios::beg); huffman_file_encoder encoder = tree->to_file_encoder(); encoder.encode_file(in); std::streamoff length_offset = out.tellp(); out.seekp(sizeof(long long), std::ios::cur); long long length = encoder.write_to_file(out); out.seekp(length_offset, std::ios::beg); out.write((char*)&length, sizeof(long long)); tree->free_nodes(); delete forest; delete tree; }
//------------------------------------------------------------------------------ bool load( std::istream &stream ) { clean(); unsigned char e_ident[EI_NIDENT]; // Read ELF file signature stream.seekg( 0 ); stream.read( reinterpret_cast<char*>( &e_ident ), sizeof( e_ident ) ); // Is it ELF file? if ( stream.gcount() != sizeof( e_ident ) || e_ident[EI_MAG0] != ELFMAG0 || e_ident[EI_MAG1] != ELFMAG1 || e_ident[EI_MAG2] != ELFMAG2 || e_ident[EI_MAG3] != ELFMAG3 ) { return false; } if ( ( e_ident[EI_CLASS] != ELFCLASS64 ) && ( e_ident[EI_CLASS] != ELFCLASS32 )) { return false; } convertor.setup( e_ident[EI_DATA] ); header = create_header( e_ident[EI_CLASS], e_ident[EI_DATA] ); if ( 0 == header ) { return false; } if ( !header->load( stream ) ) { return false; } load_sections( stream ); load_segments( stream ); return true; }
// Exercise input seeking relative to ios::cur, ios::beg and ios::end,
// verifying after every seek that a bulk re-read of the chunk matches
// narrow_data(). Returns false on the first mismatch.
bool test_input_seekable(std::istream& io)
{
    int i;  // old 'for' scope workaround.

    // Test seeking with ios::cur
    for (i = 0; i < data_reps; ++i) {
        // read one chunk character-by-character
        for (int j = 0; j < chunk_size; ++j)
            if (io.get() != narrow_data()[j])
               return false;
        // step back over the chunk just consumed and re-read it in bulk
        io.seekg(-chunk_size, BOOST_IOS::cur);
        char buf[chunk_size];
        io.read(buf, chunk_size);
        if (strncmp(buf, narrow_data(), chunk_size) != 0)
            return false;
    }

    // Test seeking with ios::beg
    std::streamoff off = 0;
    io.seekg(0, BOOST_IOS::beg);
    for (i = 0; i < data_reps; ++i, off += chunk_size) {
        for (int j = 0; j < chunk_size; ++j)
            if (io.get() != narrow_data()[j])
               return false;
        // rewind to the chunk's absolute start and re-read it in bulk
        io.seekg(off, BOOST_IOS::beg);
        char buf[chunk_size];
        io.read(buf, chunk_size);
        if (strncmp(buf, narrow_data(), chunk_size) != 0)
            return false;
    }

    // Test seeking with ios::end
    io.seekg(0, BOOST_IOS::end);
    off = io.tellg();  // total stream length
    io.seekg(-off, BOOST_IOS::end);
    for (i = 0; i < data_reps; ++i, off -= chunk_size) {
        for (int j = 0; j < chunk_size; ++j)
            if (io.get() != narrow_data()[j])
               return false;
        // seek to the same chunk counting back from the end, re-read it
        io.seekg(-off, BOOST_IOS::end);
        char buf[chunk_size];
        io.read(buf, chunk_size);
        if (strncmp(buf, narrow_data(), chunk_size) != 0)
            return false;
    }
    return true;
}
/******************************************************************************
  Create a dummy main module to wrap a library module.

  Validates that the parsed AST really is a library module with an
  absolute target namespace URI, registers a resolver that serves the
  original (rewound) query stream for that URI, and returns the parse of
  a tiny generated main module that simply imports it.
******************************************************************************/
parsenode_t XQueryCompiler::createMainModule(
    parsenode_t aLibraryModule,
    std::istream& aXQuery,
    const zstring& aFileName)
{
  //get the namespace from the LibraryModule
  LibraryModule* mod_ast = dynamic_cast<LibraryModule *>(&*aLibraryModule);
  if (!mod_ast)
    throw ZORBA_EXCEPTION(zerr::ZAPI0002_XQUERY_COMPILATION_FAILED,
    ERROR_PARAMS(ZED(BadLibraryModule)));

  const zstring& lib_namespace = mod_ast->get_decl()->get_target_namespace();

  // the module import below requires an absolute namespace URI
  URI lURI(lib_namespace);
  if(!lURI.is_absolute())
  {
    throw XQUERY_EXCEPTION(err::XQST0046,
    ERROR_PARAMS(lURI.toString(), ZED(MustBeAbsoluteURI)),
    ERROR_LOC(mod_ast->get_decl()->get_location()));
  }

  // Set up the original query stream as the result of resolving the
  // library module's URI (clear error flags left by the earlier parse
  // before rewinding)
  aXQuery.clear();
  aXQuery.seekg(0);
  FakeLibraryModuleURLResolver* aFakeResolver =
    new FakeLibraryModuleURLResolver(aFileName, aXQuery);
  theCompilerCB->theRootSctx->add_url_resolver(aFakeResolver);

  // create a dummy main module and parse it; the namespace is escaped
  // so it can be embedded safely in the generated source text
  std::stringstream lDocStream;
  zstring tmp;
  zorba::xml::escape(lib_namespace, &tmp);
  lDocStream << "import module namespace m = '" << tmp << "'; 1";

  return parse(lDocStream, aFileName);
}
// Deserialize a tuning collection from the stream. The old format is
// attempted first; if it is not recognised, the stream is rewound and the
// new SsbRead-based format is tried.
// NOTE(review): per the SNT_FAILURE branch below, the return value
// appears to be true on failure and false on success -- confirm against
// SERIALIZATION_RETURN_TYPE's convention before relying on it.
CTuningCollection::SERIALIZATION_RETURN_TYPE CTuningCollection::Deserialize(std::istream& iStrm)
//----------------------------------------------------------------------------------------------
{
	// remember the start so the new-format path can rewind after the
	// old-format probe has consumed part of the stream
	std::istream::pos_type startpos = iStrm.tellg();
	bool oldLoadingSuccess = false;

	if(DeserializeOLD(iStrm, oldLoadingSuccess))
	{
		// An old version was not recognised - trying new version.
		// clear any error flags before rewinding, or seekg would be a no-op
		iStrm.clear();
		iStrm.seekg(startpos);

		srlztn::SsbRead ssb(iStrm);
		ssb.BeginRead("TC", s_SerializationVersion);

		const srlztn::SsbRead::ReadIterator iterBeg = ssb.GetReadBegin();
		const srlztn::SsbRead::ReadIterator iterEnd = ssb.GetReadEnd();

		// dispatch each serialized item by its string id
		for(srlztn::SsbRead::ReadIterator iter = iterBeg; iter != iterEnd; iter++)
		{
			if (ssb.CompareId(iter, "0") == srlztn::SsbRead::IdMatch)
				ssb.ReadIterItem(iter, m_Name, &ReadStr);
			else if (ssb.CompareId(iter, "1") == srlztn::SsbRead::IdMatch)
				ssb.ReadIterItem(iter, m_EditMask);
			else if (ssb.CompareId(iter, "2") == srlztn::SsbRead::IdMatch)
				ssb.ReadIterItem(iter, *this, &ReadTuning);
		}

		if(ssb.GetStatus() & srlztn::SNT_FAILURE)
			return true;
		else
			return false;
	}
	else
	{
		if(oldLoadingSuccess)
			return false;
		else
			return true;
	}
}
// Initialize the archive from a YAML stream: parse the first document
// and wrap its root node. The stream must be in a good state.
void yaml_iarchive_t::init( std::istream& is)
{
    RAMEN_ASSERT( is.good());

    version_ = 0;
    header_read_ = false;

    if( 1)
        parser_.Load( is);
    else
    {
        // Dead alternative path (kept for reference): slurp the whole
        // stream into memory and parse from there. Fixed relative to the
        // original in case it is ever re-enabled: the buffer must be
        // resize()d (reserve() left size 0, so reading into &buffer[0]
        // was undefined behaviour), and the stream must be rewound to
        // the beginning after measuring its length.
        is.seekg( 0, std::ios_base::end);
        std::size_t length = is.tellg();
        is.seekg( 0, std::ios_base::beg);
        std::string buffer;
        buffer.resize( length);
        is.read( &buffer[0], length);
        std::stringstream iis( buffer, std::ios_base::in);
        parser_.Load( iis);
    }

    parser_.GetNextDocument( doc_);
    root_.reset( new yaml_node_t( this, &doc_, version_));
}
void VBSPReader::processPlanes(std::istream & str, int offset, int length) { int numPlanes; int i; Plane * planes; // Calculate the number of planes numPlanes = length / sizeof(Plane); // Seek to the Planes lump str.seekg(offset); // Read the planes planes = new Plane[numPlanes]; str.read((char *) planes, sizeof(Plane) * numPlanes); // Add the planes to the plane list for (i = 0; i < numPlanes; i++) bsp_data->addPlane(planes[i]); // Clean up delete [] planes; }
void VBSPReader::processVertices(std::istream & str, int offset, int length) { int numVertices; int i; Vec3f * vertices; // Calculate the number of vertices numVertices = length / 3 / sizeof(float); // Seek to the Vertices lump str.seekg(offset); // Read the vertex vertices = new Vec3f[numVertices]; str.read((char *) vertices, sizeof(Vec3f) * numVertices); // Add it the vertices to the list for (i = 0; i < numVertices; i++) bsp_data->addVertex(vertices[i]); // Clean up delete [] vertices; }
void VBSPReader::processEdges(std::istream & str, int offset, int length) { int numEdges; int i; Edge * edges; // Calculate the number of edges numEdges = length / sizeof(Edge); // Seek to the Edges lump str.seekg(offset); // Read the edges edges = new Edge[numEdges]; str.read((char *) edges, sizeof(Edge) * numEdges); // Add the edges to the edge list for (i = 0; i < numEdges; i++) bsp_data->addEdge(edges[i]); // Clean up delete [] edges; }
void VBSPReader::processDispInfo(std::istream & str, int offset, int length) { int numDispInfos; int i; DisplaceInfo * dispinfos; // Calculate the number of dispinfos numDispInfos = length / sizeof(DisplaceInfo); // Seek to the DisplaceInfo lump str.seekg(offset); // Read in the dispinfo entries dispinfos = new DisplaceInfo[numDispInfos]; str.read((char *) dispinfos, sizeof(DisplaceInfo) * numDispInfos); // Add the dispinfo entries to the displace info list for (i = 0; i < numDispInfos; i++) bsp_data->addDispInfo(dispinfos[i]); // Clean up delete [] dispinfos; }
void VBSPReader::processFaces(std::istream & str, int offset, int length) { int numFaces; int i; Face * faces; // Calculate the number of faces numFaces = length / sizeof(Face); // Seek to the Faces lump str.seekg(offset); // Read the faces faces = new Face[numFaces]; str.read((char *) faces, sizeof(Face) * numFaces); // Add the faces to the face list for (i = 0; i < numFaces; i++) bsp_data->addFace(faces[i]); // Clean up delete [] faces; }
void VBSPReader::processTexData(std::istream & str, int offset, int length) { int numTexDatas; int i; TexData * texdatas; // Calculate the number of texdatas numTexDatas = length / sizeof(TexData); // Seek to the TexData lump str.seekg(offset); // Read in the texdata entries texdatas = new TexData[numTexDatas]; str.read((char *) texdatas, sizeof(TexData) * numTexDatas); // Add the texdata entries to the texdata list for (i = 0; i < numTexDatas; i++) bsp_data->addTexData(texdatas[i]); // Clean up delete [] texdatas; }
/** * Virtual function to Read the data * * @param stream InputStream to read data in from * * @throws Isis::IException::Io - Error reading or preparing to read a record */ void Table::ReadData(std::istream &stream) { for (int rec = 0; rec < p_records; rec++) { streampos sbyte = (streampos)(p_startByte - 1) + (streampos)(rec * RecordSize()); stream.seekg(sbyte, std::ios::beg); if (!stream.good()) { QString msg = "Error preparing to read record [" + toString(rec + 1) + "] from Table [" + p_blobName + "]"; throw IException(IException::Io, msg, _FILEINFO_); } char *buf = new char[RecordSize()]; stream.read(buf, RecordSize()); if (!stream.good()) { QString msg = "Error reading record [" + toString(rec + 1) + "] from Table [" + p_blobName + "]"; throw IException(IException::Io, msg, _FILEINFO_); } if (p_swap) p_record.Swap(buf); p_recbufs.push_back(buf); } }
void VBSPReader::processSurfEdges(std::istream & str, int offset, int length) { int numSurfEdges; int i; int * surfEdges; // Calculate the number of edges numSurfEdges = length / sizeof(int); // Seek to the SurfEdges lump str.seekg(offset); // Read the surface edges surfEdges = new int[numSurfEdges]; str.read((char *) surfEdges, sizeof(int) * numSurfEdges); // Add the surface edges to the surface edge list for (i = 0; i < numSurfEdges; i++) bsp_data->addSurfaceEdge(surfEdges[i]); // Clean up delete [] surfEdges; }
void VBSPReader::processDispVerts(std::istream & str, int offset, int length) { int numDispVerts; int i; DisplacedVertex * dispverts; // Calculate the number of displaced vertices numDispVerts = length / sizeof(DisplacedVertex); // Seek to the DispVert lump str.seekg(offset); // Read in the displaced vertices dispverts = new DisplacedVertex[numDispVerts]; str.read((char *) dispverts, sizeof(DisplacedVertex) * numDispVerts); // Add the displaced vertices to the displaced vertex list for (i = 0; i < numDispVerts; i++) bsp_data->addDispVertex(dispverts[i]); // Clean up delete [] dispverts; }