// ------------------------------------------------------------------------------------------------ // Imports a texture file. bool Q3BSPFileImporter::importTextureFromArchive( const Q3BSP::Q3BSPModel *model, Q3BSP::Q3BSPZipArchive *archive, aiScene*, aiMaterial *pMatHelper, int textureId ) { if (nullptr == archive || nullptr == pMatHelper ) { return false; } if ( textureId < 0 || textureId >= static_cast<int>( model->m_Textures.size() ) ) { return false; } bool res = true; sQ3BSPTexture *pTexture = model->m_Textures[ textureId ]; if ( !pTexture ) { return false; } std::vector<std::string> supportedExtensions; supportedExtensions.push_back( ".jpg" ); supportedExtensions.push_back( ".png" ); supportedExtensions.push_back( ".tga" ); std::string textureName, ext; if ( expandFile( archive, pTexture->strName, supportedExtensions, textureName, ext ) ) { IOStream *pTextureStream = archive->Open( textureName.c_str() ); if ( pTextureStream ) { size_t texSize = pTextureStream->FileSize(); aiTexture *pTexture = new aiTexture; pTexture->mHeight = 0; pTexture->mWidth = static_cast<unsigned int>(texSize); unsigned char *pData = new unsigned char[ pTexture->mWidth ]; size_t readSize = pTextureStream->Read( pData, sizeof( unsigned char ), pTexture->mWidth ); (void)readSize; ai_assert( readSize == pTexture->mWidth ); pTexture->pcData = reinterpret_cast<aiTexel*>( pData ); pTexture->achFormatHint[ 0 ] = ext[ 1 ]; pTexture->achFormatHint[ 1 ] = ext[ 2 ]; pTexture->achFormatHint[ 2 ] = ext[ 3 ]; pTexture->achFormatHint[ 3 ] = '\0'; res = true; aiString name; name.data[ 0 ] = '*'; name.length = 1 + ASSIMP_itoa10( name.data + 1, static_cast<unsigned int>(MAXLEN-1), static_cast<int32_t>(mTextures.size()) ); archive->Close( pTextureStream ); pMatHelper->AddProperty( &name, AI_MATKEY_TEXTURE_DIFFUSE( 0 ) ); mTextures.push_back( pTexture ); } else { // If it doesn't exist in the archive, it is probably just a reference to an external file. 
// We'll leave it up to the user to figure out which extension the file has. aiString name; strncpy( name.data, pTexture->strName, sizeof name.data ); name.length = strlen( name.data ); pMatHelper->AddProperty( &name, AI_MATKEY_TEXTURE_DIFFUSE( 0 ) ); } } return res; }
// LoadNode //------------------------------------------------------------------------------ /*static*/ bool Node::LoadNode( IOStream & stream, Node * & node ) { // read the name of the node AStackString< 512 > nodeName; if ( stream.Read( nodeName ) == false ) { node = nullptr; return false; } // empty name means the pointer was null, which is supported if ( nodeName.IsEmpty() ) { node = nullptr; return true; } // find the node by name - this should never fail NodeGraph & ng = FBuild::Get().GetDependencyGraph(); Node * n = ng.FindNode( nodeName ); if ( n == nullptr ) { node = nullptr; return false; } node = n; return true; }
// Load (SLNDependency) //------------------------------------------------------------------------------ /*static*/ bool SLNDependency::Load( IOStream & stream, Array< SLNDependency > & slnDeps ) { ASSERT( slnDeps.IsEmpty() ); uint32_t num( 0 ); if ( !stream.Read( num ) ) { return false; } slnDeps.SetSize( num ); for ( SLNDependency & deps : slnDeps ) { if ( stream.Read( deps.m_Projects ) == false ) { return false; } if ( stream.Read( deps.m_Dependencies ) == false ) { return false; } } return true; }
// Read callback handed to the XML parser: pulls up to 'len' bytes from the
// wrapped IOStream into 'buffer'. Returns the number of bytes read, or -1
// when the stream reports an error state.
static int XMLCALL _readCallback(void *user_data, char *buffer, int len)
{
    IOStream *src = (IOStream*)user_data;

    // Mask with INT_MAX to guarantee a non-negative request size
    const uint32 wanted = (uint32)(len & INT_MAX);
    const uint32 got = src->Read(buffer, wanted);

    // A short read is only fatal when the stream itself is in a bad state
    // (e.g. a genuine I/O error rather than plain end-of-data)
    if (got != wanted && !(*src))
    {
        return -1;
    }
    return (int)got;
}
// VSProjectFileType::Load //------------------------------------------------------------------------------ /*static*/ bool VSProjectFileType::Load( IOStream & stream, Array< VSProjectFileType > & fileTypes ) { ASSERT( fileTypes.IsEmpty() ); uint32_t numFileTypes( 0 ); if ( !stream.Read( numFileTypes ) ) { return false; } fileTypes.SetSize( numFileTypes ); for ( uint32_t i=0; i<numFileTypes; ++i ) { VSProjectFileType & ft = fileTypes[ i ]; if ( stream.Read( ft.m_FileType ) == false ) { return false; } if ( stream.Read( ft.m_Pattern ) == false ) { return false; } } return true; }
// SLNSolutionFolder::Load //------------------------------------------------------------------------------ /*static*/ bool SLNSolutionFolder::Load( IOStream & stream, Array< SLNSolutionFolder > & solutionFolders ) { ASSERT( solutionFolders.IsEmpty() ); uint32_t numSolutionFolders( 0 ); if ( !stream.Read( numSolutionFolders ) ) { return false; } solutionFolders.SetSize( numSolutionFolders ); for ( uint32_t i=0; i<numSolutionFolders; ++i ) { SLNSolutionFolder & sln = solutionFolders[ i ]; if ( stream.Read( sln.m_Path ) == false ) { return false; } if ( stream.Read( sln.m_ProjectNames ) == false ) { return false; } } return true; }
// Deserialize //------------------------------------------------------------------------------ void Job::Deserialize( IOStream & stream ) { // read jobid stream.Read( m_JobId ); stream.Read( m_RemoteName ); // read properties of node m_Node = Node::LoadRemote( stream ); bool compressed; stream.Read( compressed ); // read extra data uint32_t dataSize; stream.Read( dataSize ); void * data = ALLOC( dataSize ); stream.Read( data, dataSize ); OwnData( data, dataSize, compressed ); }
// Checks whether the given file is an Assimp binary dump by comparing its
// leading bytes against the "ASSIMP.binary-dump." signature.
bool AssbinImporter::CanRead( const std::string& pFile, IOSystem* pIOHandler, bool /*checkSig*/ ) const
{
    IOStream * in = pIOHandler->Open(pFile);
    if (!in)
        return false;

    // FIX: zero-initialize and check the read size - previously a file shorter
    // than 32 bytes left uninitialized stack memory in the buffer, which was
    // then compared by strncmp (undefined / nondeterministic result).
    char s[32] = { 0 };
    const size_t read = in->Read( s, sizeof(char), sizeof(s) );
    pIOHandler->Close(in);

    if ( read < 19 ) {
        return false; // too short to contain the signature
    }
    return strncmp( s, "ASSIMP.binary-dump.", 19 ) == 0;
}
void AssbinImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOSystem* pIOHandler ) { IOStream * stream = pIOHandler->Open(pFile,"rb"); if (!stream) return; stream->Seek( 44, aiOrigin_CUR ); // signature /*unsigned int versionMajor =*/ Read<unsigned int>(stream); /*unsigned int versionMinor =*/ Read<unsigned int>(stream); /*unsigned int versionRevision =*/ Read<unsigned int>(stream); /*unsigned int compileFlags =*/ Read<unsigned int>(stream); shortened = Read<uint16_t>(stream) > 0; compressed = Read<uint16_t>(stream) > 0; if (shortened) throw DeadlyImportError( "Shortened binaries are not supported!" ); stream->Seek( 256, aiOrigin_CUR ); // original filename stream->Seek( 128, aiOrigin_CUR ); // options stream->Seek( 64, aiOrigin_CUR ); // padding if (compressed) { uLongf uncompressedSize = Read<uint32_t>(stream); uLongf compressedSize = stream->FileSize() - stream->Tell(); unsigned char * compressedData = new unsigned char[ compressedSize ]; stream->Read( compressedData, 1, compressedSize ); unsigned char * uncompressedData = new unsigned char[ uncompressedSize ]; uncompress( uncompressedData, &uncompressedSize, compressedData, compressedSize ); MemoryIOStream io( uncompressedData, uncompressedSize ); ReadBinaryScene(&io,pScene); delete[] uncompressedData; delete[] compressedData; } else { ReadBinaryScene(stream,pScene); } pIOHandler->Close(stream); }
// Load //------------------------------------------------------------------------------ bool Dependencies::Load( NodeGraph & nodeGraph, IOStream & stream ) { uint32_t numDeps; if ( stream.Read( numDeps ) == false ) { return false; } if ( GetCapacity() < GetSize() + numDeps ) { SetCapacity( GetSize() + numDeps ); } for ( uint32_t i=0; i<numDeps; ++i ) { // Read node index uint32_t index( INVALID_NODE_INDEX ); if ( stream.Read( index ) == false ) { return false; } // Convert to Node * Node * node = nodeGraph.GetNodeByIndex( index ); ASSERT( node ); // Read weak flag bool isWeak( false ); if ( stream.Read( isWeak ) == false ) { return false; } // Recombine dependency info Append( Dependency( node, isWeak ) ); } return true; }
// ------------------------------------------------------------------------------------------------ bool Q3BSPFileParser::readData( const std::string &rMapName ) { if ( !m_pZipArchive->Exists( rMapName.c_str() ) ) return false; IOStream *pMapFile = m_pZipArchive->Open( rMapName.c_str() ); if ( NULL == pMapFile ) return false; const size_t size = pMapFile->FileSize(); m_Data.resize( size ); const size_t readSize = pMapFile->Read( &m_Data[0], sizeof( char ), size ); if ( readSize != size ) { m_Data.clear(); return false; } m_pZipArchive->Close( pMapFile ); return true; }
// VSProjectConfig::Load
//------------------------------------------------------------------------------
// Deserializes an array of project configurations. The member read order below
// is the wire format: it must match the corresponding Save implementation
// exactly, so do not reorder these reads.
/*static*/ bool VSProjectConfig::Load( NodeGraph & nodeGraph, IOStream & stream, Array< VSProjectConfig > & configs )
{
    ASSERT( configs.IsEmpty() );

    uint32_t numConfigs( 0 );
    if ( !stream.Read( numConfigs ) )
    {
        return false;
    }
    configs.SetSize( numConfigs );
    for ( uint32_t i=0; i<numConfigs; ++i )
    {
        VSProjectConfig & cfg = configs[ i ];

        // Solution / platform identity
        if ( stream.Read( cfg.m_SolutionPlatform ) == false ) { return false; }
        if ( stream.Read( cfg.m_SolutionConfig ) == false ) { return false; }
        if ( stream.Read( cfg.m_Platform ) == false ) { return false; }
        if ( stream.Read( cfg.m_Config ) == false ) { return false; }

        // Target node is serialized by name and resolved via the NodeGraph
        if ( !Node::LoadNode( nodeGraph, stream, cfg.m_Target ) ) { return false; }

        // Build commands
        if ( stream.Read( cfg.m_BuildCommand ) == false ) { return false; }
        if ( stream.Read( cfg.m_RebuildCommand ) == false ) { return false; }
        if ( stream.Read( cfg.m_CleanCommand ) == false ) { return false; }
        if ( stream.Read( cfg.m_Output ) == false ) { return false; }

        // IntelliSense / compiler settings
        if ( stream.Read( cfg.m_PreprocessorDefinitions ) == false ) { return false; }
        if ( stream.Read( cfg.m_IncludeSearchPath ) == false ) { return false; }
        if ( stream.Read( cfg.m_ForcedIncludes ) == false ) { return false; }
        if ( stream.Read( cfg.m_AssemblySearchPath ) == false ) { return false; }
        if ( stream.Read( cfg.m_ForcedUsingAssemblies ) == false ) { return false; }
        if ( stream.Read( cfg.m_AdditionalOptions ) == false ) { return false; }

        // Output / layout directories
        if ( stream.Read( cfg.m_OutputDirectory ) == false ) { return false; }
        if ( stream.Read( cfg.m_IntermediateDirectory ) == false ) { return false; }
        if ( stream.Read( cfg.m_LayoutDir ) == false ) { return false; }
        if ( stream.Read( cfg.m_LayoutExtensionFilter ) == false ) { return false; }

        // Debugger / deployment settings
        if ( stream.Read( cfg.m_Xbox360DebuggerCommand ) == false ) { return false; }
        if ( stream.Read( cfg.m_DebuggerFlavor ) == false ) { return false; }
        if ( stream.Read( cfg.m_AumidOverride ) == false ) { return false; }
        if ( stream.Read( cfg.m_PlatformToolset ) == false ) { return false; }
        if ( stream.Read( cfg.m_DeploymentType ) == false ) { return false; }
        if ( stream.Read( cfg.m_DeploymentFiles ) == false ) { return false; }

        // Local debugger settings
        if ( stream.Read( cfg.m_LocalDebuggerCommandArguments ) == false ) { return false; }
        if ( stream.Read( cfg.m_LocalDebuggerWorkingDirectory ) == false ) { return false; }
        if ( stream.Read( cfg.m_LocalDebuggerCommand ) == false ) { return false; }
        if ( stream.Read( cfg.m_LocalDebuggerEnvironment ) == false ) { return false; }
    }
    return true;
}
// Deserialize
//------------------------------------------------------------------------------
// Reconstructs a tool manifest from a stream: the tool id, then the file
// table (name, timestamp, hash, content size per file). Afterwards it probes
// the local cache for files left over from a previous run (size + hash must
// match before a file is considered synchronized) and builds the environment
// block used when spawning the tool remotely.
// NOTE(review): the ms.Read() return values are ignored - presumably the
// transport guarantees a complete message; confirm against the caller.
void ToolManifest::Deserialize( IOStream & ms )
{
    ms.Read( m_ToolId );

    ASSERT( m_Files.IsEmpty() );

    uint32_t numFiles( 0 );
    ms.Read( numFiles );
    m_Files.SetCapacity( numFiles );

    for ( size_t i=0; i<(size_t)numFiles; ++i )
    {
        AStackString<> name;
        uint64_t timeStamp( 0 );
        uint32_t hash( 0 );
        uint32_t contentSize( 0 );
        ms.Read( name );
        ms.Read( timeStamp );
        ms.Read( hash );
        ms.Read( contentSize );
        m_Files.Append( File( name, timeStamp, hash, nullptr, contentSize ) );
    }

    // determine if any files are remaining from a previous run
    size_t numFilesAlreadySynchronized = 0;
    for ( size_t i=0; i<(size_t)numFiles; ++i )
    {
        AStackString<> localFile;
        GetRemoteFilePath( (uint32_t)i, localFile );

        // is this file already present?
        AutoPtr< FileStream > fileStream( FNEW( FileStream ) );
        FileStream & f = *( fileStream.Get() );
        if ( f.Open( localFile.Get() ) == false )
        {
            continue; // file not found
        }
        if ( f.GetFileSize() != m_Files[ i ].m_ContentSize )
        {
            continue; // file is not complete
        }
        AutoPtr< char > mem( (char *)ALLOC( (size_t)f.GetFileSize() ) );
        if ( f.Read( mem.Get(), (size_t)f.GetFileSize() ) != f.GetFileSize() )
        {
            continue; // problem reading file
        }
        if( Murmur3::Calc32( mem.Get(), (size_t)f.GetFileSize() ) != m_Files[ i ].m_Hash )
        {
            continue; // file contents unexpected
        }

        // file present and ok
        m_Files[ i ].m_FileLock = fileStream.Release(); // NOTE: keep file open to prevent deletions
        m_Files[ i ].m_SyncState = File::SYNCHRONIZED;
        numFilesAlreadySynchronized++;
    }

    // Generate Environment
    ASSERT( m_RemoteEnvironmentString == nullptr );

    // PATH=
    AStackString<> basePath;
    GetRemotePath( basePath );
    AStackString<> paths;
    paths.Format( "PATH=%s", basePath.Get() );

    // TMP= (forward the local TMP variable to the remote environment)
    AStackString<> normalTmp;
    Env::GetEnvVariable( "TMP", normalTmp );
    AStackString<> tmp;
    tmp.Format( "TMP=%s", normalTmp.Get() );

    // SystemRoot=
    // NOTE(review): hard-coded Windows path - presumably this code path only
    // runs for Windows workers; confirm before reusing elsewhere.
    AStackString<> sysRoot( "SystemRoot=C:\\Windows" );

    // Build a Win32-style environment block: NUL-separated strings with a
    // trailing double-NUL terminator.
    char * mem = (char *)ALLOC( paths.GetLength() + 1 + tmp.GetLength() + 1 + sysRoot.GetLength() + 1 + 1 );
    m_RemoteEnvironmentString = mem;

    AString::Copy( paths.Get(), mem, paths.GetLength() + 1 ); // including null
    mem += ( paths.GetLength() + 1 ); // including null

    AString::Copy( tmp.Get(), mem, tmp.GetLength() + 1 ); // including null
    mem += ( tmp.GetLength() + 1 ); // including null

    AString::Copy( sysRoot.Get(), mem, sysRoot.GetLength() + 1 ); // including null
    mem += ( sysRoot.GetLength() + 1 ); // including null

    *mem = 0; ++mem; // double null

    // are all files already present?
    if ( numFilesAlreadySynchronized == m_Files.GetSize() )
    {
        m_Synchronized = true;
    }
}
// --------------------------------------------------------------------------
//
// Function
//		Name:    static SearchForMatchingBlocks(IOStream &, std::map<int64_t, int64_t> &, BlocksAvailableEntry *, int64_t, int32_t[BACKUP_FILE_DIFF_MAX_BLOCK_SIZES])
//		Purpose: Find the matching blocks within the file.
//		Created: 12/1/04
//
// --------------------------------------------------------------------------
static void SearchForMatchingBlocks(IOStream &rFile, std::map<int64_t, int64_t> &rFoundBlocks,
	BlocksAvailableEntry *pIndex, int64_t NumBlocks,
	int32_t Sizes[BACKUP_FILE_DIFF_MAX_BLOCK_SIZES], DiffTimer *pDiffTimer)
{
	// Optional wall-clock limit on the whole diff; a zero-length timer means
	// "no limit" unless the caller supplies a managed DiffTimer.
	Timer maximumDiffingTime(0, "MaximumDiffingTime");
	if(pDiffTimer && pDiffTimer->IsManaged())
	{
		maximumDiffingTime = Timer(pDiffTimer->GetMaximumDiffingTime() * MILLI_SEC_IN_SEC,
			"MaximumDiffingTime");
	}

	// Maps file offset -> size of the best (largest) block matched there so far
	std::map<int64_t, int32_t> goodnessOfFit;

	// Allocate the hash lookup table (indexed by the 16-bit rolling-checksum component)
	BlocksAvailableEntry **phashTable = (BlocksAvailableEntry **)::malloc(sizeof(BlocksAvailableEntry *) * (64*1024));

	// Choose a size for the buffer, just a little bit more than the maximum block size
	int32_t bufSize = Sizes[0];
	for(int z = 1; z < BACKUP_FILE_DIFF_MAX_BLOCK_SIZES; ++z)
	{
		if(Sizes[z] > bufSize) bufSize = Sizes[z];
	}
	bufSize += 4;
	ASSERT(bufSize > Sizes[0]);
	ASSERT(bufSize > 0);
	if(bufSize > (BACKUP_FILE_MAX_BLOCK_SIZE + 1024))
	{
		THROW_EXCEPTION(BackupStoreException, BadBackupStoreFile)
	}

	// TODO: Because we read in the file a scanned block size at a time,
	// it is likely to be inefficient. Probably will be much better to
	// calculate checksums for all block sizes in a single pass.

	// Allocate the buffers.
	uint8_t *pbuffer0 = (uint8_t *)::malloc(bufSize);
	uint8_t *pbuffer1 = (uint8_t *)::malloc(bufSize);
	try
	{
		// Check buffer allocation
		if(pbuffer0 == 0 || pbuffer1 == 0 || phashTable == 0)
		{
			// If a buffer got allocated, it will be cleaned up in the catch block
			throw std::bad_alloc();
		}

		// Flag to abort the run, if too many blocks are found -- avoid using
		// huge amounts of processor time when files contain many similar blocks.
		bool abortSearch = false;

		// Search for each block size in turn
		// NOTE: Do the smallest size first, so that the scheme for adding
		// entries in the found list works as expected and replaces smaller blocks
		// with larger blocks when it finds matches at the same offset in the file.
		for(int s = BACKUP_FILE_DIFF_MAX_BLOCK_SIZES - 1; s >= 0; --s)
		{
			ASSERT(Sizes[s] <= bufSize);
			BOX_TRACE("Diff pass " << s << ", for block size " << Sizes[s]);

			// Check we haven't finished
			if(Sizes[s] == 0)
			{
				// empty entry, try next size
				continue;
			}

			// Set up the hash table entries
			SetupHashTable(pIndex, NumBlocks, Sizes[s], phashTable);

			// Shift file position to beginning
			rFile.Seek(0, IOStream::SeekType_Absolute);

			// Read first block
			if(rFile.Read(pbuffer0, Sizes[s]) != Sizes[s])
			{
				// Size of file too short to match -- do next size
				continue;
			}

			// Setup block pointers (double-buffered: 'beginnings' is the block
			// being rolled out of, 'endings' the block being rolled into)
			uint8_t *beginnings = pbuffer0;
			uint8_t *endings = pbuffer1;
			int offset = 0;

			// Calculate the first checksum, ready for rolling
			RollingChecksum rolling(beginnings, Sizes[s]);

			// Then roll, until the file is exhausted
			int64_t fileBlockNumber = 0;
			int64_t fileOffset = 0;
			int rollOverInitialBytes = 0;
			while(true)
			{
				if(maximumDiffingTime.HasExpired())
				{
					ASSERT(pDiffTimer != NULL);
					BOX_INFO("MaximumDiffingTime reached - "
						"suspending file diff");
					abortSearch = true;
					break;
				}

				if(pDiffTimer)
				{
					pDiffTimer->DoKeepAlive();
				}

				// Load in another block of data, and record how big it is
				int bytesInEndings = rFile.Read(endings, Sizes[s]);
				int tmp;

				// Skip any bytes from a previous matched block
				if(rollOverInitialBytes > 0 && offset < bytesInEndings)
				{
					int spaceLeft = bytesInEndings - offset;
					int thisRoll = (rollOverInitialBytes > spaceLeft) ? spaceLeft : rollOverInitialBytes;

					rolling.RollForwardSeveral(beginnings+offset, endings+offset, Sizes[s], thisRoll);

					offset += thisRoll;
					fileOffset += thisRoll;
					rollOverInitialBytes -= thisRoll;

					// More left to skip than this buffer holds: refill buffers first
					if(rollOverInitialBytes)
					{
						goto refresh;
					}
				}

				// tmp = size of the best block already matched at this offset (0 if none)
				if(goodnessOfFit.count(fileOffset))
				{
					tmp = goodnessOfFit[fileOffset];
				}
				else
				{
					tmp = 0;
				}

				if(tmp >= Sizes[s])
				{
					// Skip over bigger ready-matched blocks completely
					rollOverInitialBytes = tmp;
					int spaceLeft = bytesInEndings - offset;
					int thisRoll = (rollOverInitialBytes > spaceLeft) ? spaceLeft : rollOverInitialBytes;

					rolling.RollForwardSeveral(beginnings+offset, endings+offset, Sizes[s], thisRoll);

					offset += thisRoll;
					fileOffset += thisRoll;
					rollOverInitialBytes -= thisRoll;

					if(rollOverInitialBytes)
					{
						goto refresh;
					}
				}

				while(offset < bytesInEndings)
				{
					// Is current checksum in hash list?
					uint16_t hash = rolling.GetComponentForHashing();
					if(phashTable[hash] != 0 && (goodnessOfFit.count(fileOffset) == 0 || goodnessOfFit[fileOffset] < Sizes[s]))
					{
						if(SecondStageMatch(phashTable[hash], rolling, beginnings, endings, offset, Sizes[s], fileBlockNumber, pIndex, rFoundBlocks))
						{
							BOX_TRACE("Found block match of " << Sizes[s] << " bytes with hash " << hash << " at offset " << fileOffset);
							goodnessOfFit[fileOffset] = Sizes[s];

							// Block matched, roll the checksum forward to the next block without doing
							// any more comparisons, because these are pointless (as any more matches will be ignored when
							// the recipe is generated) and just take up valuable processor time. Edge cases are
							// especially nasty, using huge amounts of time and memory.
							int skip = Sizes[s];
							if(offset < bytesInEndings && skip > 0)
							{
								int spaceLeft = bytesInEndings - offset;
								int thisRoll = (skip > spaceLeft) ? spaceLeft : skip;

								rolling.RollForwardSeveral(beginnings+offset, endings+offset, Sizes[s], thisRoll);

								offset += thisRoll;
								fileOffset += thisRoll;
								skip -= thisRoll;
							}
							// Not all the bytes necessary will have been skipped, so get them
							// skipped after the next block is loaded.
							rollOverInitialBytes = skip;

							// End this loop, so the final byte isn't used again
							break;
						}
						else
						{
							// Too many to log
							// BOX_TRACE("False alarm match of " << Sizes[s] << " bytes with hash " << hash << " at offset " << fileOffset);
						}

						int64_t NumBlocksFound = static_cast<int64_t>(
							rFoundBlocks.size());
						int64_t MaxBlocksFound = NumBlocks *
							BACKUP_FILE_DIFF_MAX_BLOCK_FIND_MULTIPLE;

						if(NumBlocksFound > MaxBlocksFound)
						{
							abortSearch = true;
							break;
						}
					}

					// Roll checksum forward
					rolling.RollForward(beginnings[offset], endings[offset], Sizes[s]);

					// Increment offsets
					++offset;
					++fileOffset;
				}

				if(abortSearch) break;

			refresh:
				// Finished?
				if(bytesInEndings != Sizes[s])
				{
					// No more data in file -- check the final block
					// (Do a copy and paste of 5 lines of code instead of introducing a comparison for
					// each byte of the file)
					uint16_t hash = rolling.GetComponentForHashing();
					if(phashTable[hash] != 0 && (goodnessOfFit.count(fileOffset) == 0 || goodnessOfFit[fileOffset] < Sizes[s]))
					{
						if(SecondStageMatch(phashTable[hash], rolling, beginnings, endings, offset, Sizes[s], fileBlockNumber, pIndex, rFoundBlocks))
						{
							goodnessOfFit[fileOffset] = Sizes[s];
						}
					}

					// finish
					break;
				}

				// Switch buffers, reset offset
				beginnings = endings;
				endings = (beginnings == pbuffer0)?(pbuffer1):(pbuffer0); // ie the other buffer
				offset = 0;

				// And count the blocks which have been done
				++fileBlockNumber;
			}

			if(abortSearch) break;
		}

		// Free buffers and hash table
		::free(pbuffer1);
		pbuffer1 = 0;
		::free(pbuffer0);
		pbuffer0 = 0;
		::free(phashTable);
		phashTable = 0;
	}
	catch(...)
	{
		// Cleanup and throw
		if(pbuffer1 != 0) ::free(pbuffer1);
		if(pbuffer0 != 0) ::free(pbuffer0);
		if(phashTable != 0) ::free(phashTable);
		throw;
	}

#ifndef BOX_RELEASE_BUILD
	if(BackupStoreFile::TraceDetailsOfDiffProcess)
	{
		// Trace out the found blocks in debug mode
		BOX_TRACE("Diff: list of found blocks");
		BOX_TRACE("======== ======== ======== ========");
		BOX_TRACE(" Offset BlkIdx Size Movement");
		for(std::map<int64_t, int64_t>::const_iterator i(rFoundBlocks.begin()); i != rFoundBlocks.end(); ++i)
		{
			int64_t orgLoc = 0;
			for(int64_t b = 0; b < i->second; ++b)
			{
				orgLoc += pIndex[b].mSize;
			}
			BOX_TRACE(std::setw(8) << i->first << " " <<
				std::setw(8) << i->second << " " <<
				std::setw(8) << pIndex[i->second].mSize << " " <<
				std::setw(8) << (i->first - orgLoc));
		}
		BOX_TRACE("======== ======== ======== ========");
	}
#endif
}
// --------------------------------------------------------------------------
//
// Function
//		Name:    static LoadIndex(IOStream &, int64_t, BlocksAvailableEntry **, int64_t, bool &)
//		Purpose: Read in an index, and decrypt, and store in the in memory block format.
//				 rCanDiffFromThis is set to false if the version of the from file is too old.
//		Created: 12/1/04
//
// --------------------------------------------------------------------------
static void LoadIndex(IOStream &rBlockIndex, int64_t ThisID, BlocksAvailableEntry **ppIndex,
	int64_t &rNumBlocksOut, int Timeout, bool &rCanDiffFromThis)
{
	// Reset out-parameters so callers see sane values on every error path
	rNumBlocksOut = 0;
	rCanDiffFromThis = false;

	// Read header
	file_BlockIndexHeader hdr;
	if(!rBlockIndex.ReadFullBuffer(&hdr, sizeof(hdr), 0 /* not interested in bytes read if this fails */, Timeout))
	{
		// Couldn't read header
		THROW_EXCEPTION(BackupStoreException, CouldntReadEntireStructureFromStream)
	}

#ifndef BOX_DISABLE_BACKWARDS_COMPATIBILITY_BACKUPSTOREFILE
	// Check against backwards comptaibility stuff
	if(hdr.mMagicValue == (int32_t)htonl(OBJECTMAGIC_FILE_BLOCKS_MAGIC_VALUE_V0))
	{
		// Won't diff against old version

		// Absorb rest of stream (the stream must be fully consumed even when unusable)
		char buffer[2048];
		while(rBlockIndex.StreamDataLeft())
		{
			rBlockIndex.Read(buffer, sizeof(buffer), 1000 /* 1 sec timeout */);
		}

		// Tell caller
		rCanDiffFromThis = false;
		return;
	}
#endif

	// Check magic
	if(hdr.mMagicValue != (int32_t)htonl(OBJECTMAGIC_FILE_BLOCKS_MAGIC_VALUE_V1))
	{
		THROW_EXCEPTION(BackupStoreException, BadBackupStoreFile)
	}

	// Check that we're not trying to diff against a file which references blocks from another file
	if(((int64_t)box_ntoh64(hdr.mOtherFileID)) != 0)
	{
		THROW_EXCEPTION(BackupStoreException, CannotDiffAnIncompleteStoreFile)
	}

	// Mark as an acceptable diff.
	rCanDiffFromThis = true;

	// Get basic information
	int64_t numBlocks = box_ntoh64(hdr.mNumBlocks);
	uint64_t entryIVBase = box_ntoh64(hdr.mEntryIVBase);

	//TODO: Verify that these sizes look reasonable
	// NOTE(review): numBlocks comes straight off the stream; a hostile value
	// could overflow the multiplication below — confirm upstream validation.

	// Allocate space for the index
	BlocksAvailableEntry *pindex = (BlocksAvailableEntry*)::malloc(sizeof(BlocksAvailableEntry) * numBlocks);
	if(pindex == 0)
	{
		throw std::bad_alloc();
	}

	try
	{
		for(int64_t b = 0; b < numBlocks; ++b)
		{
			// Read an entry from the stream
			file_BlockIndexEntry entry;
			if(!rBlockIndex.ReadFullBuffer(&entry, sizeof(entry), 0 /* not interested in bytes read if this fails */, Timeout))
			{
				// Couldn't read entry
				THROW_EXCEPTION(BackupStoreException, CouldntReadEntireStructureFromStream)
			}

			// Calculate IV for this entry (base IV plus block number, in network byte order)
			uint64_t iv = entryIVBase;
			iv += b;
			// Network byte order
			iv = box_hton64(iv);
			sBlowfishDecryptBlockEntry.SetIV(&iv);

			// Decrypt the encrypted section
			file_BlockIndexEntryEnc entryEnc;
			int sectionSize = sBlowfishDecryptBlockEntry.TransformBlock(&entryEnc, sizeof(entryEnc),
					entry.mEnEnc, sizeof(entry.mEnEnc));
			if(sectionSize != sizeof(entryEnc))
			{
				THROW_EXCEPTION(BackupStoreException, BlockEntryEncodingDidntGiveExpectedLength)
			}

			// Check that we're not trying to diff against a file which references blocks from another file
			if(((int64_t)box_ntoh64(entry.mEncodedSize)) <= 0)
			{
				THROW_EXCEPTION(BackupStoreException, CannotDiffAnIncompleteStoreFile)
			}

			// Store all the required information
			pindex[b].mpNextInHashList = 0;	// hash list not set up yet
			pindex[b].mSize = ntohl(entryEnc.mSize);
			pindex[b].mWeakChecksum = ntohl(entryEnc.mWeakChecksum);
			::memcpy(pindex[b].mStrongChecksum, entryEnc.mStrongChecksum, sizeof(pindex[b].mStrongChecksum));
		}

		// Store index pointer for called
		ASSERT(ppIndex != 0);
		*ppIndex = pindex;

		// Store number of blocks for caller
		rNumBlocksOut = numBlocks;
	}
	catch(...)
	{
		// clean up and send the exception along its way
		::free(pindex);
		throw;
	}
}
// Deserialize //------------------------------------------------------------------------------ bool Node::Deserialize( IOStream & stream ) { // Deps NODE_LOAD_DEPS( 0, preBuildDeps ); ASSERT( m_PreBuildDependencies.IsEmpty() ); m_PreBuildDependencies.Append( preBuildDeps ); NODE_LOAD_DEPS( 0, staticDeps ); ASSERT( m_StaticDependencies.IsEmpty() ); m_StaticDependencies.Append( staticDeps ); NODE_LOAD_DEPS( 0, dynamicDeps ); ASSERT( m_DynamicDependencies.IsEmpty() ); m_DynamicDependencies.Append( dynamicDeps ); // Properties const ReflectionInfo * const ri = GetReflectionInfoV(); const ReflectionIter end = ri->End(); for ( ReflectionIter it = ri->Begin(); it != end; ++it ) { const ReflectedProperty & property = *it; const PropertyType pt = property.GetType(); switch ( pt ) { case PT_ASTRING: { if ( property.IsArray() ) { Array< AString > arrayOfStrings; // TODO:C Eliminate this copy if ( stream.Read( arrayOfStrings ) == false ) { return false; } property.SetProperty( this, arrayOfStrings ); } else { AStackString<> string; // TODO:C remove this copy if ( stream.Read( string ) == false ) { return false; } property.SetProperty( this, string ); } break; } case PT_BOOL: { bool b( false ); if ( stream.Read( b ) == false ) { return false; } property.SetProperty( this, b ); break; } case PT_UINT32: { uint32_t u32( 0 ); if ( stream.Read( u32 ) == false ) { return false; } property.SetProperty( this, u32 ); break; } default: { ASSERT( false ); // Unsupported type break; } } } return true; }