bool CMapLoaderGalaxy::gotoNextSignature(std::ifstream &MapFile) { // try the original "!ID!" Sig... size_t pos = findInStream(MapFile, "!ID!"); MapFile.seekg( pos, std::ios::beg ); if(pos != std::string::npos) return true; gLogging.textOut("Warning! Your are opening a map which is not correctly signed. Some Mods, using different Editors, have that issue!!"); gLogging.textOut("If you are playing a mod it might okay though. If it's an original game, it is tainted and you should get a better copy. Continuing..."); return false; }
/**
 * Determines the dimensions of a labyrinth stored in a file.
 *
 * \param [in] InputFile
 *    The file to read data from.
 *
 * \param [out] RowsCount
 *    Receives the number of rows in the labyrinth.
 *    Altered only if the file contains valid data.
 *
 * \param [out] ColsCount
 *    Receives the number of columns in the labyrinth.
 *    Altered only if the file contains valid data.
 *
 * \return
 *    true if the file contains valid data and the dimensions were written
 *    to RowsCount and ColsCount; otherwise false.
 */
bool Board::GetBoardDimensionsFromFile(std::ifstream & InputFile, int& RowsCount, int& ColsCount)
{
    InputFile.clear();
    InputFile.seekg(0, std::ios::beg);

    int rows = 0;    // Number of rows in the board
    int cols = 0;    // Number of columns in the board
    char c = 0;
    int counter = 0; // Width of the row currently being scanned

    // Find the number of columns (length of the first line).
    while (InputFile.get(c) && c != '\n')
        cols++;

    // Find the number of rows and verify every row has the same width.
    if (cols > 0)
    {
        rows++; // at least one row was read from the file

        while (InputFile.get(c))
        {
            if (c == '\n')
            {
                // the number of columns on each line must be the same
                if (cols != counter)
                    return false;
                rows++;
                counter = 0;
            }
            else
            {
                counter++;
            }
        }

        // The last row may not be followed by a newline, in which case the
        // newline-counting loop above missed it.
        // Bug fixes: (1) validate the width of that unterminated last row —
        // the old code accepted e.g. "abc\nab" as a valid 2x3 board;
        // (2) only count it when it actually contains characters — the old
        // code counted a single unterminated line as two rows.
        if (c != '\n' && counter > 0)
        {
            if (counter != cols)
                return false;
            rows++;
        }
    }

    RowsCount = rows;
    ColsCount = cols;
    return true;
}
// Loads the rope header from the start of the stream, validates it, and
// prints it. Returns false (stream left open) when validation fails;
// returns true (stream closed) on success. The size accumulators are unused
// for this header type.
bool inspect<RopeHeader>(std::ifstream& in, size_type&, size_type&)
{
    in.seekg(0);

    RopeHeader header;
    header.load(in);

    const bool valid = header.check();
    if (!valid)
        return false;

    in.close();
    std::cout << header << std::endl;
    return true;
}
// Validates the bootstrap file's magic number and file_info header, then
// positions the stream just past the full header.
// Returns the size of the full header (magic + declared header size).
// Throws std::runtime_error on short reads, parse failures, an unknown
// magic value, or an oversized file_info blob.
uint64_t BootstrapFile::seek_to_first_chunk(std::ifstream& import_file)
{
  uint32_t file_magic;

  std::string str1;
  char buf1[2048];

  // Read and validate the 4-byte magic number at the start of the file.
  import_file.read(buf1, sizeof(file_magic));
  if (! import_file)
    throw std::runtime_error("Error reading expected number of bytes");
  str1.assign(buf1, sizeof(file_magic));
  if (! ::serialization::parse_binary(str1, file_magic))
    throw std::runtime_error("Error in deserialization of file_magic");
  if (file_magic != blockchain_raw_magic)
  {
    LOG_PRINT_RED_L0("bootstrap file not recognized");
    throw std::runtime_error("Aborting");
  }
  else
    LOG_PRINT_L0("bootstrap file recognized");

  // Read the serialized length of the bootstrap::file_info blob that
  // immediately follows the magic.
  uint32_t buflen_file_info;
  import_file.read(buf1, sizeof(buflen_file_info));
  str1.assign(buf1, sizeof(buflen_file_info));
  if (! import_file)
    throw std::runtime_error("Error reading expected number of bytes");
  if (! ::serialization::parse_binary(str1, buflen_file_info))
    throw std::runtime_error("Error in deserialization of buflen_file_info");
  LOG_PRINT_L1("bootstrap::file_info size: " << buflen_file_info);

  // Guard against a corrupt length that would overflow the stack buffer.
  if (buflen_file_info > sizeof(buf1))
    throw std::runtime_error("Error: bootstrap::file_info size exceeds buffer size");

  // Read and deserialize the file_info blob itself (version numbers and
  // the declared header size).
  import_file.read(buf1, buflen_file_info);
  if (! import_file)
    throw std::runtime_error("Error reading expected number of bytes");
  str1.assign(buf1, buflen_file_info);
  bootstrap::file_info bfi;
  if (! ::serialization::parse_binary(str1, bfi))
    throw std::runtime_error("Error in deserialization of bootstrap::file_info");
  LOG_PRINT_L0("bootstrap file v" << unsigned(bfi.major_version) << "." << unsigned(bfi.minor_version));
  LOG_PRINT_L0("bootstrap magic size: " << sizeof(file_magic));
  LOG_PRINT_L0("bootstrap header size: " << bfi.header_size);

  // Skip to the first chunk: the header occupies the magic plus the size
  // declared inside file_info (which may exceed what was parsed above).
  uint64_t full_header_size = sizeof(file_magic) + bfi.header_size;
  import_file.seekg(full_header_size);

  return full_header_size;
}
int TransFunc1DKeys::openImageJBinary(std::ifstream& fileStream, bool raw) { // the default quantity of colors int numColors = 256; if (!raw) { // read the header information seperatly int id = readInt(fileStream); if (id != 1229147980) { // a leading 1229147980 (= 'ICOL') indicates an NIH Image LUT // if ICOL isn't the first entry in the table, let the other procedures handle it // but first, go back to the beginning fileStream.seekg(std::ios::beg); return 0; } readShort(fileStream); // Version numColors = readShort(fileStream); // Number of Colors readShort(fileStream); // Start readShort(fileStream); // End readDouble(fileStream); // Filler1 readDouble(fileStream); // Filler2 readInt(fileStream); // Filler3 } // The colors in a binary table are saved in succession so // first load the reds, then greens and at last blues char redColors[256]; char greenColors[256]; char blueColors[256]; try { fileStream.read(&redColors[0], numColors); fileStream.read(&greenColors[0], numColors); fileStream.read(&blueColors[0], numColors); } catch (...) { throw; } unsigned char data[256*4]; for (int i = 0; i < 256; ++i) { data[i*4 + 0] = redColors[i]; data[i*4 + 1] = greenColors[i]; data[i*4 + 2] = blueColors[i]; data[i*4 + 3] = (char)(255); } dimensions_ = tgt::ivec3(256, 1, 1); generateKeys(&data[0]); return 256; }
void Q3BSPLoad::LoadLightmaps(std::ifstream& aFile) { //Calculate number of lightmaps int num_lightmaps=m_header.m_directoryEntries[bspLightmaps].m_length/sizeof(BSP_LOAD_LIGHTMAP); //Create space for this many BSP_LOAD_LIGHTMAPs m_loadLightmaps.resize(num_lightmaps); //Load textures aFile.seekg(m_header.m_directoryEntries[bspLightmaps].m_offset,std::ios::beg); aFile.read((char*)&m_loadLightmaps[0], m_header.m_directoryEntries[bspLightmaps].m_length); //Change the gamma settings on the lightmaps (make them brighter) float gamma=2.5f; for(int i=0; i<num_lightmaps; ++i) { for(int j=0; j<128*128; ++j) { float r, g, b; r=m_loadLightmaps[i].m_lightmapData[j*3+0]; g=m_loadLightmaps[i].m_lightmapData[j*3+1]; b=m_loadLightmaps[i].m_lightmapData[j*3+2]; r*=gamma/255.0f; g*=gamma/255.0f; b*=gamma/255.0f; //find the value to scale back up float scale=1.0f; float temp; if(r > 1.0f && (temp = (1.0f/r)) < scale) scale=temp; if(g > 1.0f && (temp = (1.0f/g)) < scale) scale=temp; if(b > 1.0f && (temp = (1.0f/b)) < scale) scale=temp; // scale up color values scale*=255.0f; r*=scale; g*=scale; b*=scale; //fill data back in m_loadLightmaps[i].m_lightmapData[j*3+0]=(unsigned char)r; m_loadLightmaps[i].m_lightmapData[j*3+1]=(unsigned char)g; m_loadLightmaps[i].m_lightmapData[j*3+2]=(unsigned char)b; //m_loadLightmaps[i].m_lightmapData[j*3+0]=(GLubyte)255; //m_loadLightmaps[i].m_lightmapData[j*3+1]=(GLubyte)255; //m_loadLightmaps[i].m_lightmapData[j*3+2]=(GLubyte)255; } } }
void Q3BSPLoad::LoadVertices(std::ifstream& aFile) { //calculate number of vertices int num_vertices=m_header.m_directoryEntries[bspVertices].m_length/sizeof(BSP_LOAD_VERTEX); //Create space for this many BSP_LOAD_VERTICES m_loadVertices.resize(num_vertices); //go to vertices in file aFile.seekg(m_header.m_directoryEntries[bspVertices].m_offset,std::ios::beg); //read in the vertices aFile.read((char*)&m_loadVertices[0], m_header.m_directoryEntries[bspVertices].m_length); }
/** * Reads the headers of a region file: chunk offsets/timestamps */ bool RegionFile::readHeaders(std::ifstream& file) { if (!file) return false; containing_chunks.clear(); for (int i = 0; i < 1024; i++) { chunk_offsets[i] = 0; chunk_timestamps[i] = 0; } for (int x = 0; x < 32; x++) { for (int z = 0; z < 32; z++) { file.seekg(4 * (x + z * 32), std::ios::beg); int tmp; file.read((char*) &tmp, 4); if (tmp == 0) continue; int offset = be32toh(tmp << 8) * 4096; //uint8_t sectors = ((uint8_t*) &tmp)[3]; file.seekg(4096, std::ios::cur); int timestamp; file.read((char*) ×tamp, 4); timestamp = be32toh(timestamp); ChunkPos pos(x + regionpos.x * 32, z + regionpos.z * 32); if (rotation) pos.rotate(rotation); containing_chunks.insert(pos); chunk_offsets[z * 32 + x] = offset; chunk_timestamps[z * 32 + x] = timestamp; } } return true; }
// Populates the PE NT headers and the section-header list from the file.
// The caller's stream position is saved on entry and restored on exit.
// Section headers are heap-allocated and ownership is transferred to
// imgSectionHeaders (freed elsewhere).
void fill_nt_structures(std::ifstream &file)
{
    /* Remember where the caller was in the file */
    std::streampos off = file.tellg();

    // e_lfanew (from the DOS header) is the file offset of the NT headers.
    file.seekg(imgDosHeader.e_lfanew, std::ios::beg);
    file.read((char*)&imgNtHeaders, get_nt_headers_size());

    file.seekg(imgDosHeader.e_lfanew, std::ios::beg);
    /* This offset is relative to the NT Header, do not forget to move the file pointer on it */
    file.seekg(imgNtHeaders.get_offset_first_section(), std::ios::cur);

    // Section headers are laid out back-to-back after the optional header.
    for(unsigned int i = 0; i < imgNtHeaders.FileHeader.NumberOfSections; ++i)
    {
        RP_IMAGE_SECTION_HEADER* pImgSectionHeader = new (std::nothrow) RP_IMAGE_SECTION_HEADER;
        if(pImgSectionHeader == NULL)
            RAISE_EXCEPTION("Cannot allocate memory for pImgSectionHeader");

        file.read((char*)pImgSectionHeader, get_image_section_header_size());
        imgSectionHeaders.push_back(pImgSectionHeader);
    }

    // Restore the caller's position.
    file.seekg(off);
}
/**
 * Returns the record stored at index i.
 * Throws LxaEndException when i is past the last record in the file.
 */
LaserxaData get_data (size_t i)
{
    if (i >= data_count_)
    {
        throw LxaEndException ();
    }

    // Records are stored back-to-back immediately after the fixed-size
    // info header.
    const size_t offset = sizeof(LaserxaInfo) + i * sizeof(LaserxaData);

    LaserxaData record;
    file_in_.seekg (offset, std::ios::beg);
    file_in_.read (reinterpret_cast<char*>(&record), sizeof(LaserxaData));
    return record;
}
//! TODO static void _read_variable_length_records(std::ifstream &ifs, const public_header_block &phb, std::vector<variable_length_record> *vlr) { if (0 != vlr) { } else { // Skip past VLR data. ifs.seekg( phb.offset_to_data - sizeof(public_header_block) - sizeof(uint16), std::ios_base::cur); } }
// Returns the last non-empty line of the stream, starting the scan from the
// stream's current position. Leaves the stream positioned after that line.
std::string getLastLine(std::ifstream& in)
{
    std::ifstream::pos_type candidate = in.tellg();
    std::ifstream::pos_type next;

    // Walk forward line by line, remembering where the most recently
    // consumed line began.
    for (;;)
    {
        if (!(in >> std::ws))
            break;
        if (!ignoreline(in, next))
            break;
        candidate = next;
    }

    // Rewind to the start of the final line and read it out.
    in.clear();
    in.seekg(candidate);

    std::string line;
    std::getline(in, line);
    return line;
}
// Reads one MAT-file data-element tag from the stream, handling both the
// regular 8-byte tag layout and the compressed "small data element" layout.
// On success *dataType/*numBytes are filled and the stream sits at the
// element's data; on failure the stream is restored to where it started.
bool WLMatLib::MATReader::readTagField( mDataType_t* const dataType, mNumBytes_t* const numBytes, std::ifstream& ifs )
{
    // Remember the tag start so the stream can be rewound on error.
    const std::streampos pos = ifs.tellg();

    // Regular tag: 4-byte data type followed by a 4-byte byte count.
    ifs.read( ( char* )dataType, sizeof(WLMatLib::mDataType_t) );
    ifs.read( ( char* )numBytes, sizeof(WLMatLib::mNumBytes_t) );

    // A type value above miUTF32 cannot be a valid full tag, so the element
    // is assumed to use the small-data-element layout (2-byte count packed
    // with a 2-byte type in the first 4 bytes; the data follows immediately).
    // NOTE(review): the MAT spec signals small elements via the non-zero
    // upper 16 bits of the first word; this range test presumes the same
    // effect — confirm against the Level 5 MAT-file documentation.
    if( *dataType > WLMatLib::DataTypes::miUTF32 )
    {
        wlog::debug( LIBNAME ) << "Small Data Element Format found.";
        WLMatLib::mDataTypeSmall_t typeSmall;
        WLMatLib::mNumBytesSmall_t bytesSmall;
        // Rewind over the 8 bytes just read and re-read only the 4-byte
        // small tag; the remaining 4 bytes belong to the element data.
        ifs.seekg( -( sizeof(WLMatLib::mDataType_t) + sizeof(WLMatLib::mNumBytes_t) ), ifstream::cur );
        ifs.read( ( char* )&typeSmall, sizeof(WLMatLib::mDataTypeSmall_t) );
        ifs.read( ( char* )&bytesSmall, sizeof(WLMatLib::mNumBytesSmall_t) );
        *dataType = typeSmall;
        *numBytes = bytesSmall;
    }

    // Still out of range after the small-format retry: unknown type or a
    // corrupt structure. Restore the stream position and report failure.
    if( *dataType > WLMatLib::DataTypes::miUTF32 )
    {
        wlog::error( LIBNAME ) << "Unknown data type or wrong data structure!";
        ifs.seekg( pos );
        return false;
    }
    return true;
}
// Opens the source and destination files described by `desc` and measures
// the source length. Throws (C-string) when either file fails to open.
// A zero-length source marks the copy as already done.
Copier(CopyDescriptor desc, unsigned block_size)
    : _desc(desc),
      _src(_desc.src.c_str()),
      _dest(_desc.dest.c_str()),
      _block_size(block_size)
{
    _done = false;

    // Bug fix: validate the streams *before* seeking and measuring. The old
    // code seeked/tellg'd first, so a failed open recorded tellg() == -1 as
    // the source length before the constructor finally threw.
    if (!_src.good())
        throw "Unable to open source file!";
    if (!_dest.good())
        throw "Unable to open dest file!";

    // get length of the source file
    _src.seekg(0, std::ios::end);
    _src_length = _src.tellg();
    _src.seekg(0, std::ios::beg);
    _src_offset = 0;

    // Nothing to copy for an empty source.
    if (_src_length == 0)
        _done = true;
}
// Computes the Jenkins lookup3 hash pair over the next `length` bytes of the
// stream without disturbing the caller's position, optionally seeding from
// `init`. Returns {pc, pb} as produced by hashlittle2.
inline std::pair<uint32_t, uint32_t> lookup3(std::ifstream &stream, uint32_t length, const std::pair<uint32_t, uint32_t> &init = { 0, 0 })
{
    uint32_t pc = init.first;
    uint32_t pb = init.second;

    // Remember where we were so the stream can be restored afterwards.
    const auto restore_pos = stream.tellg();

    std::vector<char> bytes(length);
    stream.read(bytes.data(), length);
    hashlittle2(bytes.data(), length, &pc, &pb);

    stream.seekg(restore_pos);
    return { pc, pb };
}
bool FindChunk(std::ifstream& stream, DWORD fourcc, DWORD& dwChunkSize, DWORD& dwChunkDataPosition){ stream.seekg(0, std::ios::beg); DWORD dwChunkType(0), dwChunkDataSize(0), dwRIFFDataSize(0), dwFileType(0), dwOffset(0); while(!stream.eof()){// as long as we have not hit the end of the file // riff standard is always type, followied by the chunk size. They are always 4 bytes too, stream.read(reinterpret_cast<char*>(&dwChunkType), 4); stream.read(reinterpret_cast<char*>(&dwChunkDataSize), 4); if( fourccRIFF ==dwChunkType){// at the riff header dwRIFFDataSize = dwChunkDataSize; dwChunkDataSize = 4; stream.read(reinterpret_cast<char*>(&dwFileType), 4); } else stream.seekg(dwChunkDataSize, std::ios::cur);// skip this chunk dwOffset += sizeof(DWORD) * 2; if (dwChunkType == fourcc){// found what we were looking for dwChunkSize = dwChunkDataSize; dwChunkDataPosition = dwOffset; return true; } dwOffset += dwChunkDataSize; } char* temp = reinterpret_cast<char*>(&fourcc); OUTPUT_DEBUG_MSG("Could not find the chunk "<<temp[0]<<temp[1]<<temp[2]<<temp[3]); return false;// if this is hit, it means what we were searching for was not found }
/**
 * Checks if there is an x error present in the data set
 * @param stream:: the stream object
 */
bool LoadRKH::hasXerror(std::ifstream &stream) {
  const auto restorePoint = stream.tellg();

  // Peek at the next data line: a fourth whitespace-separated token means
  // the data carries x errors.
  std::string line;
  getline(stream, line);

  std::istringstream tokens(line);
  std::string x, y, yerr, xerr;
  tokens >> x >> y >> yerr >> xerr;
  const bool containsXerror = !xerr.empty();

  // Reset the original location of the stream
  stream.seekg(restorePoint, stream.beg);
  return containsXerror;
}
// Rewinds the OBJ-style stream and appends every "vt" texture coordinate
// line to `uv`. The unnamed unsigned parameter is unused (kept for the
// shared loader signature).
void LoadTexCoords(std::vector<Vec2f>& uv, std::ifstream& f, unsigned int)
{
    f.clear(std::ios_base::goodbit);
    f.seekg(0, std::ios_base::beg);

    char buf[1024] = { 0 };
    char buf1[64] = { 0 };
    // Bug fix: iterate on getline() success rather than !eof(), so a failed
    // read no longer triggers one extra pass over stale buffer contents.
    while (f.getline(buf, 1024))
    {
        // Bug fix: check the sscanf result; a blank line previously left
        // buf1 holding the previous line's keyword.
        buf1[0] = '\0';
        if (sscanf(buf, "%s", buf1) != 1)
            continue;
        if (strcmp(buf1, "vt") == 0)
        {
            uv.resize(uv.size() + 1);
            // "vt u v" — the coordinate text starts after the 3-char prefix.
            LoadTexCoord(uv.back(), buf + 3);
        }
    }
}
/******************************************************************************************************
Function: void saveAs(std::ifstream& ioStream, std::ofstream& saveto, std::string filename);
Description: Saves numOfCoins to an output file.
Parameters: std::ifstream& ifStream, std::ofstream& saveto, std::string filename
Pre-Conditions: Must have an input and output file.
Post-Conditions: The ifstream file is copied to the ofstream file and the
input stream is rewound to the beginning.
******************************************************************************************************/
void saveAs(std::ifstream& ifStream, std::ofstream& saveto, std::string filename)
{
    output_file_check(saveto, filename);

    // Copy the input file to the output file line by line.
    std::string line;
    while (getline(ifStream, line))
    {
        saveto << line << std::endl;
    }

    // Clear the eof flag and rewind so the caller can re-read the input.
    ifStream.clear();
    ifStream.seekg(0L, std::ios::beg);
}
// Loads the header from the start of the stream and validates it. On
// success the per-file sequence/base counts are added to the running
// totals, the stream is closed, and the header is printed; on a failed
// check the function returns false with the totals untouched.
bool inspect(std::ifstream& in, size_type& total_sequences, size_type& total_bases)
{
    in.seekg(0);

    HeaderFormat header;
    header.load(in);

    const bool valid = header.check();
    if (!valid)
        return false;

    total_sequences += header.sequences;
    total_bases += header.bases;

    in.close();
    std::cout << header << std::endl;
    return true;
}
// Rewinds the OBJ-style stream and appends every "v" vertex line to the
// mesh. The unnamed unsigned parameter is unused (kept for the shared
// loader signature).
void LoadVertices(Mesh* res, std::ifstream& f, unsigned int)
{
    f.clear(std::ios_base::goodbit);
    f.seekg(0, std::ios_base::beg);

    char buf[1024] = { 0 };
    char buf1[64] = { 0 };
    // Bug fix: iterate on getline() success rather than !eof(), so a failed
    // read no longer triggers one extra pass over stale buffer contents.
    while (f.getline(buf, 1024))
    {
        // Bug fix: check the sscanf result; a blank line previously left
        // buf1 holding the previous line's keyword.
        buf1[0] = '\0';
        if (sscanf(buf, "%s", buf1) != 1)
            continue;
        if (strcmp(buf1, "v") == 0)
        {
            res->vertices.resize(res->vertices.size() + 1);
            // "v x y z" — the coordinate text starts after the 2-char prefix.
            LoadVertex(res->vertices.back(), buf + 2);
        }
    }
}
/** * Count the number of columns in the first line of the text file * @param logFileStream :: stream to the file * @param logFileName :: name for the log file */ int LoadLog::countNumberColumns(std::ifstream& logFileStream, const std::string& logFileName) { if (!logFileStream) { throw std::invalid_argument("Unable to open file " + m_filename); } std::string str; kind l_kind(LoadLog::empty); //extract first line of file Mantid::Kernel::Strings::extractToEOL(logFileStream,str); if ( !isDateTimeString(str) ) { throw std::invalid_argument("File" + logFileName + " is not a standard ISIS log file. Expected to be a file starting with DateTime String format."); } std::stringstream line(str); std::string timecolumn; line >> timecolumn; std::string blockcolumn; line >> blockcolumn; l_kind = classify(blockcolumn); if ( LoadLog::string != l_kind && LoadLog::number != l_kind ) { throw std::invalid_argument("ISIS log file contains unrecognised second column entries:" + logFileName); } std::string valuecolumn; line >> valuecolumn; l_kind = classify(valuecolumn); //reset file back to the beginning logFileStream.seekg(0); if ( LoadLog::string != l_kind && LoadLog::number != l_kind) { return 2; //looks like a two column file } else { return 3; //looks like a three column file } }
// Reads the AVF frame index that starts at file offset 0x21. Each packed
// 19-byte entry holds: bytes 0-1 frame number, 2-5 start offset, 6-9 length
// (remaining bytes unused here). Entries are placed at their frame-number
// slot; .at() throws if a frame number exceeds numEntries.
std::vector<Her::AVF_idxEntry> getAVFindex(std::ifstream & theavf, uint16_t numEntries)
{
    theavf.seekg(0x21);

    // RAII buffer replaces the old raw new[]/delete[] pair, which leaked
    // whenever jumpList.at() threw below. A vector's storage is allocated
    // by operator new and therefore as aligned as the old heap buffer,
    // keeping the field casts as valid as before.
    std::vector<char> rawData(19);
    std::vector<Her::AVF_idxEntry> jumpList(numEntries);
    for (int i = 0; i < numEntries; i++)
    {
        theavf.read(rawData.data(), 19);
        // NOTE(review): fields are reinterpreted in host byte order, exactly
        // as the original code did — assumes a little-endian host; confirm
        // if big-endian targets matter.
        uint16_t entNum = ((uint16_t*)rawData.data())[0];
        jumpList.at(entNum).frameNo = entNum;
        jumpList.at(entNum).startAt = ((uint32_t*)(rawData.data()+2))[0];
        jumpList.at(entNum).goFor = ((uint32_t*)(rawData.data()+2))[1];
    }
    return jumpList;
}
// Fills listHistCandidate with one NodeCandidate per record read from the
// candidate file, restarting each read from the position recorded after the
// previous record.
void rellenaLista(std::list<NodeCandidate> &listHistCandidate, std::ifstream &fichCandidate)
{
    NodeCandidate nodeAux;
    long positionCandidate = 0;
    char basura; // "garbage": consumes one separator character per record

    // NOTE(review): with this !eof() loop shape, if the trailing `>> basura`
    // (rather than the record read) is what hits end-of-file, one extra or
    // partial node may already have been appended — confirm leerHistograma's
    // behaviour at EOF before changing the loop.
    while( !fichCandidate.eof() )
    {
        // Re-position to the start of the next record; on the first pass the
        // stream is already at the beginning, so no seek is needed.
        if (positionCandidate != 0)
            fichCandidate.seekg(positionCandidate, fichCandidate.beg);

        // leerHistograma fills the histogram/image fields and returns the type.
        nodeAux.tipo = leerHistograma(nodeAux.hisCandidate, nodeAux.img, fichCandidate);
        listHistCandidate.push_back(nodeAux);

        // Remember where this record ended, then consume the separator.
        positionCandidate = fichCandidate.tellg();
        fichCandidate >> basura;
    }
}
/* * Reads all the DataGroupHeaders in a file and all information for each DataSetHeader in every DataGroup. */ void DataGroupHeaderReader::ReadAll(std::ifstream& fileStream, FileHeader& fh, u_int32_t dataGroupCnt) { // Get the first data group offset u_int32_t nextDataGroupFilePos = fh.GetFirstDataGroupFilePos(); for (u_int32_t i = 0; i < dataGroupCnt; ++i) { // Read the DataGroupHeader DataGroupHeader dch; // Move to the indicated position in the file fileStream.seekg(nextDataGroupFilePos, std::ios_base::beg); nextDataGroupFilePos = Read(fileStream, dch); fh.AddDataGroupHdr(dch); } }
// Rewinds the stream and scans for the first line beginning with `tag`,
// leaving m_line holding it. Afterwards advances to the next data line.
// Returns true when the tag line was found.
bool configfile::line_after (std::ifstream & file, const std::string & tag)
{
    file.clear();
    file.seekg(0, std::ios::beg);

    bool found = false;
    for (file.getline(m_line, sizeof(m_line));
         ! file.eof();
         file.getline(m_line, sizeof(m_line)))
    {
        if (strncmp(m_line, tag.c_str(), tag.length()) == 0)
        {
            found = true;
            break;
        }
    }
    (void) next_data_line(file);
    return found;
}
// Copies one keyword verbatim from the input stream to the output stream.
// skipKeyword() is used to find where the keyword ends; the stream is then
// rewound and the span is copied byte-for-byte.
static void copyKeyword( std::ifstream& is , std::ofstream& os)
{
    std::ios::pos_type start_pos = is.tellg();
    skipKeyword( is );
    std::ios::pos_type end_pos = is.tellg();
    long length = end_pos - start_pos;

    // RAII buffer replaces the old raw new[]/delete[] pair, which leaked
    // whenever read() or write() threw.
    std::string buffer;
    buffer.resize( length );
    is.seekg( start_pos );
    is.read( &buffer[0] , length );
    os.write( buffer.data() , length );
}
//father: xxx map chunk //child:none void ReadAndParseMapChunk(std::string& outGeneratedTextureName) { //please refer to project "Assimp" for more chunk information uint16_t chunkID = 0; uint32_t chunkLength = 0; BINARY_READ(chunkID); BINARY_READ(chunkLength); /*case Discreet3DS::CHUNK_PERCENTF: case Discreet3DS::CHUNK_PERCENTW: case Discreet3DS::CHUNK_MAT_MAP_USCALE: case Discreet3DS::CHUNK_MAT_MAP_VSCALE: case Discreet3DS::CHUNK_MAT_MAP_UOFFSET: case Discreet3DS::CHUNK_MAT_MAP_VOFFSET: case Discreet3DS::CHUNK_MAT_MAP_ANG: case Discreet3DS::CHUNK_MAT_MAP_TILING:*/ switch (chunkID) { case NOISE_3DS_CHUNKID_MAPPING_FILENAME: { std::string texFilePath; //back() can retrieve the current processing material std::string texName=c_defaultTexNamePrefix + std::to_string(static_textureMapIndex); ++static_textureMapIndex; //...Read File Path From File ReadStringFromFileA(texFilePath); //texName-filePath pair,so that texture creation could be done static_TexName2FilePathPairList.insert(std::make_pair(texName,texFilePath.c_str())); outGeneratedTextureName = std::move(texName); } break; default: //skip this color chunk fileIn.seekg(chunkLength - c_chunkHeadByteLength, std::ios::cur); break; } }
void get_meta_data(std::ifstream& file, graph *graph) { // going back to the beginning of the file file.clear(); file.seekg(0, std::ios::beg); std::string buffer; std::getline(file, buffer); if ((buffer.compare(std::string("AdjacencyGraph")))) { std::cout << "Invalid input file" << buffer << std::endl; exit(1); } buffer.clear(); std::getline(file, buffer); graph->num_nodes = atoi(buffer.c_str()); buffer.clear(); std::getline(file, buffer); graph->num_edges = atoi(buffer.c_str()); }
// Reads one FASTA record from the stream: a ">name" header line followed by
// sequence lines accumulated into `seq`. Stops (and rewinds) at the next
// record's header so consecutive calls consume records in order.
// Returns false when the stream does not start with a header line.
bool fastaTrack::read(std::ifstream &in)
{
    std::string line;
    std::getline(in, line);

    // Bug fix: guard against an empty line before indexing line[0]
    // (undefined behaviour pre-C++11).
    if (line.empty() || line[0] != '>')
        return false;
    name = line.substr(1);

    // Accumulate sequence lines until the next header or end of file.
    // Iterating on getline() success replaces the old !eof() test, which
    // appended the (empty) result of a failed final read.
    while (true)
    {
        std::streampos pos = in.tellg();
        if (!std::getline(in, line))
            break;
        if (!line.empty() && line[0] == '>')
        {
            // Rewind so the next read() call sees this header again.
            in.seekg(pos);
            break;
        }
        seq += line;
    }
    return true;
}