//////////////////////////////////////////////////////////////////////////
// fileChunkReady
void ScnTexture::fileChunkReady( BcU32 ChunkIdx, BcU32 ChunkID, void* pData )
{
    // If we have no render core, re-request chunk 0 so we keep getting re-entered.
    if( RsCore::pImpl() == NULL )
    {
        requestChunk( 0 );
        return;
    }

    if( ChunkID == BcHash( "header" ) )
    {
        // Request all texture levels.
        for( BcU32 iLevel = 0; iLevel < Header_.Levels_; ++iLevel )
        {
            requestChunk( ++ChunkIdx );
        }

        // The header has been updated, so create a new texture rather than updating the old one.
        CreateNewTexture_ = BcTrue;
    }
    else if( ChunkID == BcHash( "body" ) )
    {
        // Grab pointer to data.
        BcAssert( pTextureData_ == NULL || pTextureData_ == pData );
        pTextureData_ = pData;

        // Setup.
        setup();
    }
}
//////////////////////////////////////////////////////////////////////////
// addChunk
BcU32 CsPackageImporter::addChunk( BcU32 ID, const void* pData, BcSize Size, BcSize RequiredAlignment, BcU32 Flags )
{
    std::lock_guard< std::recursive_mutex > Lock( BuildingLock_ );
    BcAssert( BuildingBeginCount_ > 0 );

    BcAssert( Size > 0 );
    BcAssert( BcPot( RequiredAlignment ) );
    BcAssert( RequiredAlignment <= 4096 );

    const BcU8* pPackedData = reinterpret_cast< const BcU8* >( pData );
    size_t PackedSize = Size;
    BcBool HaveCompressed = BcFalse;

    // If we need to compress, do so.
    if( ( Flags & csPCF_COMPRESSED ) != 0 )
    {
        if( BcCompressData( static_cast< const BcU8* >( pData ), Size, pPackedData, PackedSize ) )
        {
            HaveCompressed = BcTrue;
        }
        else
        {
            // On failure, strip compressed flag.
            Flags &= ~csPCF_COMPRESSED;
        }
    }

    // Generate header.
    CsPackageChunkHeader ChunkHeader;
    ChunkHeader.ID_ = ID;
    ChunkHeader.Offset_ = 0;
    ChunkHeader.Flags_ = Flags;
    ChunkHeader.RequiredAlignment_ = static_cast< BcU32 >( RequiredAlignment );
    ChunkHeader.PackedBytes_ = static_cast< BcU32 >( PackedSize );
    ChunkHeader.UnpackedBytes_ = static_cast< BcU32 >( Size );
    ChunkHeader.PackedHash_ = BcHash( (BcU8*)pPackedData, PackedSize );
    ChunkHeader.UnpackedHash_ = BcHash( (BcU8*)pData, Size );

    // Generate data.
    CsPackageChunkData ChunkData;
    ChunkData.Status_ = csPCS_NOT_LOADED;
    ChunkData.Managed_ = BcFalse;

    // Store as packed data.
    ChunkData.pPackedData_ = new BcU8[ PackedSize ];
    ChunkData.pUnpackedData_ = NULL;
    BcMemCopy( ChunkData.pPackedData_, pPackedData, PackedSize );

    if( HaveCompressed )
    {
        delete [] pPackedData;
    }

    // Push into lists.
    ChunkHeaders_.push_back( ChunkHeader );
    ChunkDatas_.push_back( ChunkData );

    return static_cast< BcU32 >( ChunkHeaders_.size() - 1 );
}
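//////////////////////////////////////////////////////////////////////////
// Usage sketch for addChunk (illustrative only; it mirrors the importer
// call sites below). 'HeaderStream', 'BodyData' and 'BodySize' are
// hypothetical locals, and the trailing parameters are assumed to default
// when omitted, as in the three-argument calls elsewhere in this file.
void exampleAddChunks( CsPackageImporter& Importer, BcStream& HeaderStream, const BcU8* BodyData, BcSize BodySize )
{
    // Small header chunk: loaded in place, 16 byte aligned.
    Importer.addChunk( BcHash( "header" ), HeaderStream.pData(), HeaderStream.dataSize(), 16, csPCF_IN_PLACE );

    // Bulk body chunk: request compression; addChunk strips csPCF_COMPRESSED
    // again automatically if BcCompressData fails.
    Importer.addChunk( BcHash( "body" ), BodyData, BodySize, 16, csPCF_COMPRESSED );
}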
//////////////////////////////////////////////////////////////////////////
// import
//virtual
BcBool ScnTexture::import( class CsPackageImporter& Importer, const Json::Value& Object )
{
    const std::string& FileName = Object[ "source" ].asString();

    // Add root dependency.
    Importer.addDependency( FileName.c_str() );

    // Load texture from file and create the data for export.
    ImgImage* pImage = Img::load( FileName.c_str() );
    if( pImage != NULL )
    {
        // Encode the image as a format.
        BcU8* pEncodedImageData = NULL;
        BcU32 EncodedImageDataSize = 0;

        // TODO: Take from parameters.
        ImgEncodeFormat EncodeFormat = imgEF_RGBA8;
        eRsTextureFormat TextureFormat = rsTF_RGBA8;

        if( pImage->encodeAs( EncodeFormat, pEncodedImageData, EncodedImageDataSize ) )
        {
            // Serialise encoded image.
            BcStream BodyStream( BcFalse, 1024, EncodedImageDataSize );
            BodyStream.push( pEncodedImageData, EncodedImageDataSize );
            delete pEncodedImageData;
            pEncodedImageData = NULL;

            BcStream HeaderStream;
            THeader Header = { pImage->width(), pImage->height(), 1, TextureFormat };
            HeaderStream << Header;

            // Delete image.
            delete pImage;

            // Add chunks and finish up.
            Importer.addChunk( BcHash( "header" ), HeaderStream.pData(), HeaderStream.dataSize(), 16, csPCF_IN_PLACE );
            Importer.addChunk( BcHash( "body" ), BodyStream.pData(), BodyStream.dataSize() );

            return BcTrue;
        }
        else
        {
            BcPrintf( "Failed to encode image \"%s\"\n", FileName.c_str() );

            // Delete image on the failure path too, so it isn't leaked.
            delete pImage;
        }
    }
    else
    {
        BcPrintf( "Failed to load image \"%s\"\n", FileName.c_str() );
    }

    return BcFalse;
}
//////////////////////////////////////////////////////////////////////////
// fileChunkReady
//virtual
void ScnRenderTarget::fileChunkReady( BcU32 ChunkIdx, BcU32 ChunkID, void* pData )
{
    if( ChunkID == BcHash( "header" ) )
    {
        markCreate();
    }
}
//////////////////////////////////////////////////////////////////////////
// fileChunkReady
void ScnComponent::fileChunkReady( BcU32 ChunkIdx, BcU32 ChunkID, void* pData )
{
    if( ChunkID == BcHash( "object" ) )
    {
        pJsonObject_ = reinterpret_cast< const BcChar* >( pData );

        // Parse the JSON object.
        Json::Value Root;
        Json::Reader Reader;
        if( Reader.parse( pJsonObject_, Root ) )
        {
            // New way: serialise class members via reflection.
            // (Renamed from 'Reader' to avoid shadowing the Json::Reader above.)
            CsSerialiserPackageObjectCodec ObjectCodec( getPackage(), bcRFF_IMPORTER, bcRFF_NONE, bcRFF_IMPORTER );
            SeJsonReader SeReader( &ObjectCodec );
            SeReader.serialiseClassMembers( this, this->getClass(), Root, 0 );

            // Now reinitialise.
            initialise();
        }
        else
        {
            BcBreakpoint;
        }

        CsResource::markCreate();
        CsResource::markReady();
    }
}
//////////////////////////////////////////////////////////////////////////
// fileChunkReady
void ScnSound::fileChunkReady( BcU32 ChunkIdx, BcU32 ChunkID, void* pData )
{
    if( ChunkID == BcHash( "filedata" ) )
    {
        pFileData_ = (SsSourceFileData*)pData;
        markCreate();
    }
}
//////////////////////////////////////////////////////////////////////////
// getChecksum
BcU32 GaGameUnit::getChecksum() const
{
    BcU32 Checksum = 0;
    Checksum += BcHash( (BcU8*)&TeamID_, sizeof( TeamID_ ) );
    Checksum += BcHash( (BcU8*)&ID_, sizeof( ID_ ) );
    Checksum += BcHash( (BcU8*)&Behaviour_, sizeof( Behaviour_ ) );
    Checksum += BcHash( (BcU8*)&NextState_, sizeof( NextState_ ) );
    Checksum += BcHash( (BcU8*)&CurrState_, sizeof( CurrState_ ) );
    Checksum += BcHash( (BcU8*)&PrevState_, sizeof( PrevState_ ) );
    Checksum += BcHash( (BcU8*)&AttackTimer_, sizeof( AttackTimer_ ) );
    Checksum += BcHash( (BcU8*)&Health_, sizeof( Health_ ) );
    Checksum += BcHash( (BcU8*)&MoveTargetPosition_, sizeof( MoveTargetPosition_ ) );
    return Checksum;
}
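//////////////////////////////////////////////////////////////////////////
// Sketch: how a per-unit checksum like the above is typically consumed for
// lockstep desync detection. 'calculateStateChecksum' and its parameters
// are hypothetical; the real game-state class is not part of this excerpt.
BcU32 calculateStateChecksum( const GaGameUnit* const* ppUnits, BcU32 NoofUnits )
{
    BcU32 Checksum = 0;
    for( BcU32 Idx = 0; Idx < NoofUnits; ++Idx )
    {
        // Unsigned addition is order independent, so peers may iterate
        // their unit lists in any order and still agree on the result.
        Checksum += ppUnits[ Idx ]->getChecksum();
    }
    return Checksum;
}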
//////////////////////////////////////////////////////////////////////////
// fileChunkReady
void ScnModel::fileChunkReady( BcU32 ChunkIdx, BcU32 ChunkID, void* pData )
{
    // If we have no render core, re-request chunk 0 so we keep getting re-entered.
    if( RsCore::pImpl() == NULL )
    {
        requestChunk( 0 );
        return;
    }

    if( ChunkID == BcHash( "header" ) )
    {
        pHeader_ = (ScnModelHeader*)pData;
    }
    else if( ChunkID == BcHash( "nodetransformdata" ) )
    {
        pNodeTransformData_ = (ScnModelNodeTransformData*)pData;
    }
    else if( ChunkID == BcHash( "nodepropertydata" ) )
    {
        pNodePropertyData_ = (ScnModelNodePropertyData*)pData;

        // Mark up node names.
        // TODO: Automate this process with reflection!
        for( BcU32 NodeIdx = 0; NodeIdx < pHeader_->NoofNodes_; ++NodeIdx )
        {
            ScnModelNodePropertyData* pNodePropertyNode = &pNodePropertyData_[ NodeIdx ];
            markupName( pNodePropertyNode->Name_ );
        }
    }
    else if( ChunkID == BcHash( "vertexdata" ) )
    {
        BcAssert( pVertexBufferData_ == NULL || pVertexBufferData_ == pData );
        pVertexBufferData_ = (BcU8*)pData;
    }
    else if( ChunkID == BcHash( "indexdata" ) )
    {
        BcAssert( pIndexBufferData_ == NULL || pIndexBufferData_ == pData );
        pIndexBufferData_ = (BcU8*)pData;
    }
    else if( ChunkID == BcHash( "vertexelements" ) )
    {
        pVertexElements_ = (RsVertexElement*)pData;
    }
    else if( ChunkID == BcHash( "meshdata" ) )
    {
        pMeshData_ = (ScnModelMeshData*)pData;
        RsVertexElement* pVertexElements = pVertexElements_;

        // Point each mesh at its slice of the vertex element array.
        // (Indexed per mesh; writing through pMeshData_ directly would only
        // ever touch the first mesh.)
        for( BcU32 Idx = 0; Idx < pHeader_->NoofPrimitives_; ++Idx )
        {
            pMeshData_[ Idx ].VertexElements_ = pVertexElements;
            pVertexElements += pMeshData_[ Idx ].NoofVertexElements_;
        }

        markCreate(); // All data loaded, time to create.
    }
}
//////////////////////////////////////////////////////////////////////////
// fileChunkReady
//virtual
void AkBank::fileChunkReady( BcU32 ChunkIdx, BcU32 ChunkID, void* pData )
{
    if( ChunkID == BcHash( "header" ) )
    {
        pBankName_ = getString( Header_.BankNameID_ );

        if( AkCore::pImpl() )
        {
            if( AK::SoundEngine::LoadBank( pBankName_, AK_DEFAULT_POOL_ID, BankID_ ) != AK_Success )
            {
                BcVerifyMsg( BcFalse, "Unable to load Wwise sound bank \"%s\". Has it been generated from the Wwise tool?", pBankName_ );
            }
        }
    }
}
//////////////////////////////////////////////////////////////////////////
// import
//virtual
BcBool AkBank::import( class CsPackageImporter& Importer, const Json::Value& Object )
{
    // Temporary hack until post-LD.
    std::string BankName = (*getName());

    BcStream HeaderStream;
    THeader Header = { Importer.addString( BankName.c_str() ) };
    HeaderStream << Header;

    // Add chunks and finish up.
    Importer.addChunk( BcHash( "header" ), HeaderStream.pData(), HeaderStream.dataSize(), 16, csPCF_IN_PLACE );

    return BcTrue;
}
//////////////////////////////////////////////////////////////////////////
// import
//virtual
BcBool ScnRenderTarget::import( class CsPackageImporter& Importer, const Json::Value& Object )
{
    BcU32 Width = Object[ "width" ].asUInt();
    BcU32 Height = Object[ "height" ].asUInt();

    // Streams.
    BcStream HeaderStream;

    // Write header.
    ScnTextureHeader Header = { Width, Height, 1, 1, rsTT_2D, rsTF_RGBA8 };
    HeaderStream << Header;

    Importer.addChunk( BcHash( "header" ), HeaderStream.pData(), HeaderStream.dataSize(), 16, csPCF_IN_PLACE );

    return BcTrue;
}
//////////////////////////////////////////////////////////////////////////
// import
BcBool ScnAnimationImport::import( const Json::Value& )
{
#if PSY_IMPORT_PIPELINE
    if( Source_.empty() )
    {
        PSY_LOG( "ERROR: Missing 'source' field.\n" );
        return BcFalse;
    }

    CsResourceImporter::addDependency( Source_.c_str() );

    auto PropertyStore = aiCreatePropertyStore();
    aiLogStream AssimpLogger = { AssimpLogStream, (char*)this };
    aiAttachLogStream( &AssimpLogger );

    Scene_ = aiImportFileExWithProperties( Source_.c_str(), 0, nullptr, PropertyStore );
    aiReleasePropertyStore( PropertyStore );

    if( Scene_ != nullptr )
    {
        PSY_LOG( "Found %u animations:\n", Scene_->mNumAnimations );
        for( int Idx = 0; Idx < (int)Scene_->mNumAnimations; ++Idx )
        {
            PSY_LOG( " - %s\n", Scene_->mAnimations[ Idx ]->mName.C_Str() );
        }

        // Build animated nodes list. Need this to calculate relative transforms later.
        recursiveParseAnimatedNodes( Scene_->mRootNode, BcErrorCode );

        // Pack down animation into useful internal format.
        BcAssert( Scene_->mNumAnimations == 1 );
        for( BcU32 AnimationIdx = 0; AnimationIdx < 1; ++AnimationIdx )
        {
            auto* Animation = Scene_->mAnimations[ AnimationIdx ];
            BcF32 Rate = 1.0f;
            BcU32 Duration = static_cast< BcU32 >( Animation->mDuration / Rate );

            // Setup data streams.
            ScnAnimationHeader Header;
            Header.NoofNodes_ = Animation->mNumChannels;
            Header.NoofPoses_ = Duration;
            Header.Flags_ = scnAF_DEFAULT;
            Header.Packing_ = scnAP_R16S16T16; // TODO: Make this configurable when we factor out into another class.
            HeaderStream_ << Header;

            // Animation node file data.
            ScnAnimationNodeFileData NodeFileData;
            for( BcU32 NodeIdx = 0; NodeIdx < Animation->mNumChannels; ++NodeIdx )
            {
                auto* Channel = Animation->mChannels[ NodeIdx ];
                NodeFileData.Name_ = CsResourceImporter::addString( Channel->mNodeName.C_Str() );
                NodeStream_ << NodeFileData;
            }

            // Calculate output pose.
            for( BcF32 Time = 0.0f; Time <= Animation->mDuration; Time += Rate )
            {
                ScnAnimationPoseFileData Pose;
                Pose.Time_ = Time / FrameRate_;
                Pose.KeyDataOffset_ = static_cast< BcU32 >( KeyStream_.dataSize() );

                // Iterate over all node channels to generate keys.
                for( BcU32 ChannelIdx = 0; ChannelIdx < Animation->mNumChannels; ++ChannelIdx )
                {
                    auto* Channel = Animation->mChannels[ ChannelIdx ];
                    auto& AnimatedNode = findAnimatedNode( Channel->mNodeName.C_Str() );

                    aiVector3D OutPositionKey;
                    aiVector3D OutScaleKey;
                    aiQuaternion OutRotationKey;

                    // Extract position.
                    GetKeyNodeAnim( Channel->mPositionKeys, Channel->mNumPositionKeys, Time, BcTrue, OutPositionKey );

                    // Extract scale.
                    GetKeyNodeAnim( Channel->mScalingKeys, Channel->mNumScalingKeys, Time, BcTrue, OutScaleKey );

                    // Extract rotation.
                    GetKeyNodeAnim( Channel->mRotationKeys, Channel->mNumRotationKeys, Time, BcTrue, OutRotationKey );

                    // Combine key into transform.
                    ScnAnimationTransform Transform;
                    Transform.R_ = MaQuat( OutRotationKey.x, OutRotationKey.y, OutRotationKey.z, OutRotationKey.w );
                    Transform.S_ = MaVec3d( OutScaleKey.x, OutScaleKey.y, OutScaleKey.z );
                    Transform.T_ = MaVec3d( OutPositionKey.x, OutPositionKey.y, OutPositionKey.z );

                    // Store as local matrix.
                    Transform.toMatrix( AnimatedNode.LocalTransform_ );
                }

                // Calculate local node matrices relative to their parents.
                for( auto& AnimatedNode : AnimatedNodes_ )
                {
                    if( AnimatedNode.ParentIdx_ != BcErrorCode )
                    {
                        auto& ParentAnimatedNode( AnimatedNodes_[ AnimatedNode.ParentIdx_ ] );
                        MaMat4d ParentLocal = ParentAnimatedNode.LocalTransform_;
                        AnimatedNode.WorldTransform_ = ParentLocal * AnimatedNode.LocalTransform_;
                    }
                    else
                    {
                        AnimatedNode.WorldTransform_ = AnimatedNode.LocalTransform_;
                    }
                }

                // Write out pose keys.
                ScnAnimationTransformKey_R16S16T16 OutKey;
                for( BcU32 ChannelIdx = 0; ChannelIdx < Animation->mNumChannels; ++ChannelIdx )
                {
                    auto* Channel = Animation->mChannels[ ChannelIdx ];
                    const auto& AnimatedNode = findAnimatedNode( Channel->mNodeName.C_Str() );

                    // Extract individual transform elements.
                    ScnAnimationTransform Transform;
                    Transform.fromMatrix( AnimatedNode.LocalTransform_ );

                    // Pack into output key.
                    OutKey.pack( Transform.R_, Transform.S_, Transform.T_ );
                    KeyStream_ << OutKey;
                }

                // Final size + CRC.
                Pose.KeyDataSize_ = static_cast< BcU32 >( KeyStream_.dataSize() - Pose.KeyDataOffset_ );
                Pose.CRC_ = BcHash::GenerateCRC32( 0, KeyStream_.pData() + Pose.KeyDataOffset_, Pose.KeyDataSize_ );

                // Write out pose.
                PoseStream_ << Pose;
            }

            // Write out chunks.
            CsResourceImporter::addChunk( BcHash( "header" ), HeaderStream_.pData(), HeaderStream_.dataSize(), 16, csPCF_IN_PLACE );
            CsResourceImporter::addChunk( BcHash( "nodes" ), NodeStream_.pData(), NodeStream_.dataSize() );
            CsResourceImporter::addChunk( BcHash( "poses" ), PoseStream_.pData(), PoseStream_.dataSize() );
            CsResourceImporter::addChunk( BcHash( "keys" ), KeyStream_.pData(), KeyStream_.dataSize() );
        }

        aiReleaseImport( Scene_ );
        Scene_ = nullptr;

        return BcTrue;
    }
#endif // PSY_IMPORT_PIPELINE
    return BcFalse;
}
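//////////////////////////////////////////////////////////////////////////
// Chunk layout emitted by ScnAnimationImport::import, as read from the code
// above (a summary, not engine documentation):
//   "header" - ScnAnimationHeader: node/pose counts, flags, key packing.
//   "nodes"  - one ScnAnimationNodeFileData per channel (string table IDs).
//   "poses"  - one ScnAnimationPoseFileData per frame: time, plus offset,
//              size and CRC32 of its block in the key stream.
//   "keys"   - packed ScnAnimationTransformKey_R16S16T16 entries, one per
//              channel per pose, addressed via KeyDataOffset_/KeyDataSize_.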
//////////////////////////////////////////////////////////////////////////
// import
BcBool CsPackageImporter::import( const BcName& Name )
{
    Name_ = Name;
    BcPath Path = CsCore::pImpl()->getPackageImportPath( Name );
    PSY_LOGSCOPEDCATEGORY( "Import" );
    PSY_LOG( "Importing %s...\n", (*Path).c_str() );
    PSY_LOGSCOPEDINDENT;

    BcTimer TotalTimer;
    TotalTimer.mark();

    // Store source file info.
    FsStats Stats;
    if( FsCore::pImpl()->fileStats( (*Path).c_str(), Stats ) )
    {
        Header_.SourceFileStatsHash_ = BcHash( reinterpret_cast< BcU8* >( &Stats ), sizeof( Stats ) );
    }
    else
    {
        Header_.SourceFileStatsHash_ = 0;
    }

    beginImport();
    Header_.SourceFile_ = addString( (*Path).c_str() );
    endImport();

    Json::Value Root;
    if( loadJsonFile( (*Path).c_str(), Root ) )
    {
        // Add as dependency.
        beginImport();
        addDependency( (*Path).c_str() );

        // Get resource list.
        Json::Value Resources( Root.get( "resources", Json::Value( Json::arrayValue ) ) );

        // Add all package cross refs.
        addAllPackageCrossRefs( Resources );

        // Set resource id to zero.
        ResourceIds_.store( 0 );

        // Import everything.
        for( const auto& ResourceObject : Resources )
        {
            addImport( ResourceObject, BcFalse );
        }
        endImport();

        // Sort importers.
        std::sort( Resources_.begin(), Resources_.end() );

        // Iterate over all resources and import (import calls can append to the list).
        size_t CurrResourceIdx = 0;
        while( CurrResourceIdx < Resources_.size() )
        {
            // Grab next resource in the list.
            auto ResourceEntry = std::move( Resources_[ CurrResourceIdx++ ] );

            // Import resource.
            BcTimer ResourceTimer;
            ResourceTimer.mark();
            try
            {
                PSY_LOGSCOPEDINDENT;
                beginImport();
                if( importResource( std::move( ResourceEntry.Importer_ ), ResourceEntry.Resource_ ) )
                {
                    PSY_LOG( "SUCCEEDED: Time: %.2f seconds.\n", ResourceTimer.time() );
                }
                else
                {
                    PSY_LOG( "FAILED: Time: %.2f seconds.\n", ResourceTimer.time() );
                    BcBreakpoint;
                    endImport();
                    return BcFalse;
                }
                endImport();
            }
            catch( const CsImportException& ImportException )
            {
                PSY_LOG( "FAILED: Time: %.2f seconds.\n", ResourceTimer.time() );
                PSY_LOG( "ERROR: in file %s:\n%s\n", ImportException.file().c_str(), ImportException.what() );
                endImport();
                return BcFalse;
            }
        }

        // Save and return.
        BcPath PackedPackage( CsCore::pImpl()->getPackagePackedPath( Name ) );
        BcBool SaveSuccess = save( PackedPackage );

        if( SaveSuccess )
        {
            PSY_LOG( "SUCCEEDED: Time: %.2f seconds.\n", TotalTimer.time() );

            // Write out dependencies.
            std::string OutputDependencies = *CsCore::pImpl()->getPackageIntermediatePath( Name ) + "/deps.json";
            CsSerialiserPackageObjectCodec ObjectCodec( nullptr, (BcU32)bcRFF_ALL, (BcU32)bcRFF_TRANSIENT, 0 );
            SeJsonWriter Writer( &ObjectCodec );
            Writer << Dependencies_;
            Writer.save( OutputDependencies.c_str() );
        }
        else
        {
            PSY_LOG( "FAILED: Time: %.2f seconds.\n", TotalTimer.time() );
            BcBreakpoint;
        }

        return SaveSuccess;
    }

    return BcFalse;
}
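//////////////////////////////////////////////////////////////////////////
// Import pipeline stages, as implemented above (a summary for orientation):
//   1. Hash the source file stats so stale packages can be detected on load.
//   2. Parse the package JSON and register the file as its own dependency.
//   3. Gather cross-package references, then queue every resource import.
//   4. Import resources in sorted order; imports may append new resources,
//      which is why the loop re-checks Resources_.size() each iteration.
//   5. On success, save the packed package and write deps.json alongside
//      the intermediates.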
//////////////////////////////////////////////////////////////////////////
// processResourceChunk
void CsPackageLoader::processResourceChunk( BcU32 ResourceIdx, BcU32 ChunkIdx )
{
    CsPackageResourceHeader& ResourceHeader = pResourceHeaders_[ ResourceIdx ];
    CsPackageChunkHeader& ChunkHeader = pChunkHeaders_[ ChunkIdx ];
    CsPackageChunkData& ChunkData = pChunkData_[ ChunkIdx ];
    CsResource* pResource = pPackage_->getResource( ResourceIdx );
    BcU32 ResourceChunkIdx = ChunkIdx - ResourceHeader.FirstChunk_;

    BcAssert( pResource != NULL );
    BcAssert( ChunkIdx >= ResourceHeader.FirstChunk_ && ChunkIdx <= ResourceHeader.LastChunk_ );

    // Update the status.
    switch( ChunkData.Status_ )
    {
    case csPCS_NOT_LOADED:
        {
            // Ensure we've got a pointer for unpacked data.
            BcAssert( ChunkData.pUnpackedData_ != NULL );

            // Set status to loading.
            ChunkData.Status_ = csPCS_LOADING;

            // If we've got compressed data, read into a packed buffer.
            if( ChunkHeader.Flags_ & csPCF_COMPRESSED )
            {
                // Chunk isn't loaded, need to read in data.
                BcU32 DataPosition = DataPosition_ + ChunkHeader.Offset_;
                BcU32 Bytes = ChunkHeader.PackedBytes_;

                // Allocate space for the packed data.
                ChunkData.pPackedData_ = new BcU8[ Bytes ];

                // Do async read.
                ++PendingCallbackCount_;
                File_.readAsync( DataPosition, ChunkData.pPackedData_, Bytes, FsFileOpDelegate::bind< CsPackageLoader, &CsPackageLoader::onDataLoaded >( this ) );
            }
            else
            {
                // Chunk isn't loaded, need to read in data.
                BcU32 DataPosition = DataPosition_ + ChunkHeader.Offset_;
                BcU32 Bytes = ChunkHeader.UnpackedBytes_;

                // Do async read.
                ++PendingCallbackCount_;
                File_.readAsync( DataPosition, ChunkData.pUnpackedData_, Bytes, FsFileOpDelegate::bind< CsPackageLoader, &CsPackageLoader::onDataLoaded >( this ) );
            }
        }
        break;

    case csPCS_LOADING:
        {
            // If this is a compressed chunk, we need to move to the unpacking stage first.
            if( ( ChunkHeader.Flags_ & csPCF_COMPRESSED ) == 0 )
            {
                // Check the data is valid.
                BcU32 Hash = BcHash( ChunkData.pUnpackedData_, ChunkHeader.UnpackedBytes_ );
                BcAssertMsg( Hash == ChunkHeader.UnpackedHash_, "Corrupted data." );

                // Set status to ready.
                ChunkData.Status_ = csPCS_READY;
            }
            else
            {
                // Set status to unpacking.
                ChunkData.Status_ = csPCS_UNPACKING;

                // Check the data is valid.
                BcU32 Hash = BcHash( ChunkData.pPackedData_, ChunkHeader.PackedBytes_ );
                BcAssertMsg( Hash == ChunkHeader.PackedHash_, "Corrupted data." );

                // TODO: Async decompress.
                // Uncompress.
                if( BcDecompressData( ChunkData.pPackedData_, ChunkHeader.PackedBytes_, ChunkData.pUnpackedData_, ChunkHeader.UnpackedBytes_ ) )
                {
                    // Done, free packed data.
                    delete [] ChunkData.pPackedData_;
                    ChunkData.pPackedData_ = NULL;

                    // Set status to ready.
                    ChunkData.Status_ = csPCS_READY;
                }
                else
                {
                    BcBreakpoint;
                }
            }
        }
        break;

    case csPCS_UNPACKING:
        {
            // TODO: Unpacking is complete, free packed data.
            ChunkData.Status_ = csPCS_READY;
        }
        break;

    case csPCS_READY:
        {
            // Don't need to do any processing.
        }
        break;
    }

    // If state has changed to ready, do callback.
    if( ChunkData.Status_ == csPCS_READY )
    {
        // Queue up callback.
        BcDelegate< void (*)( BcU32, BcU32, void* ) > Delegate( BcDelegate< void (*)( BcU32, BcU32, void* ) >::bind< CsResource, &CsResource::onFileChunkReady >( pResource ) );
        SysKernel::pImpl()->enqueueCallback( Delegate, ResourceChunkIdx, ChunkHeader.ID_, ChunkData.pUnpackedData_ );
    }
}
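//////////////////////////////////////////////////////////////////////////
// Chunk lifecycle, as inferred from the switch above:
//   csPCS_NOT_LOADED --readAsync----------------------> csPCS_LOADING
//   csPCS_LOADING ----(uncompressed, hash verified)---> csPCS_READY
//   csPCS_LOADING ----(compressed, hash verified)-----> csPCS_UNPACKING
//   csPCS_UNPACKING --BcDecompressData (synchronous)--> csPCS_READY
//   csPCS_READY ------> CsResource::onFileChunkReady queued on the kernel
//                       with the unpacked data pointer.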
//////////////////////////////////////////////////////////////////////////
// onHeaderLoaded
void CsPackageLoader::onHeaderLoaded( void* pData, BcSize Size )
{
    // Check we have the right data.
    BcAssert( pData == &Header_ );
    BcAssert( Size == sizeof( Header_ ) );

    // Check the header is valid.
    if( Header_.Magic_ != CsPackageHeader::MAGIC )
    {
        BcPrintf( "CsPackageLoader: Invalid magic number. Not a valid package.\n" );
        HasError_ = BcTrue;
        --PendingCallbackCount_;
        return;
    }

    // Check version number.
    if( Header_.Version_ != CsPackageHeader::VERSION )
    {
        BcPrintf( "CsPackageLoader: Out of date package. Requires reimport.\n" );
        HasError_ = BcTrue;
        --PendingCallbackCount_;
        return;
    }

#if PSY_SERVER
    // Reimport if source file stats changed.
    const BcPath ImportPackage( CsCore::pImpl()->getPackageImportPath( pPackage_->getName() ) );

    FsStats Stats;
    if( FsCore::pImpl()->fileStats( (*ImportPackage).c_str(), Stats ) )
    {
        if( Header_.SourceFileStatsHash_ != BcHash( reinterpret_cast< BcU8* >( &Stats ), sizeof( Stats ) ) )
        {
            BcPrintf( "CsPackageLoader: Source file stats have changed.\n" );
            HasError_ = BcTrue;
            --PendingCallbackCount_;
            return;
        }
    }
#endif

    // Allocate all the memory we need up front.
    pPackageData_ = BcMemAlign( Header_.TotalAllocSize_, Header_.MaxAlignment_ );

    // Use this to advance as we need.
    BcU8* pCurrPackageData = reinterpret_cast< BcU8* >( pPackageData_ );

    // Loaded header, now mark up the string table, chunks & props.
    pStringTable_ = reinterpret_cast< BcChar* >( pCurrPackageData );
    pCurrPackageData += BcCalcAlignment( Header_.StringTableBytes_, Header_.MinAlignment_ );

    pResourceHeaders_ = reinterpret_cast< CsPackageResourceHeader* >( pCurrPackageData );
    pCurrPackageData += BcCalcAlignment( Header_.TotalResources_ * sizeof( CsPackageResourceHeader ), Header_.MinAlignment_ );

    pChunkHeaders_ = reinterpret_cast< CsPackageChunkHeader* >( pCurrPackageData );
    pCurrPackageData += BcCalcAlignment( Header_.TotalChunks_ * sizeof( CsPackageChunkHeader ), Header_.MinAlignment_ );

    pChunkData_ = reinterpret_cast< CsPackageChunkData* >( pCurrPackageData );
    pCurrPackageData += BcCalcAlignment( Header_.TotalChunks_ * sizeof( CsPackageChunkData ), Header_.MinAlignment_ );

    // Clear string table.
    BcMemZero( pStringTable_, Header_.StringTableBytes_ );

    // Clear chunk data.
    for( BcU32 Idx = 0; Idx < Header_.TotalChunks_; ++Idx )
    {
        pChunkData_[ Idx ].Status_ = csPCS_NOT_LOADED;
        pChunkData_[ Idx ].Managed_ = BcFalse;
        pChunkData_[ Idx ].pPackedData_ = NULL;
        pChunkData_[ Idx ].pUnpackedData_ = NULL;
    }

    // Setup file position data.
    BcU32 Bytes = 0;

    // Load the string table in.
    ++PendingCallbackCount_;
    Bytes = Header_.StringTableBytes_;
    File_.readAsync( DataPosition_, pStringTable_, Bytes, FsFileOpDelegate::bind< CsPackageLoader, &CsPackageLoader::onStringTableLoaded >( this ) );
    DataPosition_ += Bytes;

    // Load resource headers in.
    ++PendingCallbackCount_;
    Bytes = Header_.TotalResources_ * sizeof( CsPackageResourceHeader );
    File_.readAsync( DataPosition_, pResourceHeaders_, Bytes, FsFileOpDelegate::bind< CsPackageLoader, &CsPackageLoader::onResourceHeadersLoaded >( this ) );
    DataPosition_ += Bytes;

    // Load chunk headers in.
    ++PendingCallbackCount_;
    Bytes = Header_.TotalChunks_ * sizeof( CsPackageChunkHeader );
    File_.readAsync( DataPosition_, pChunkHeaders_, Bytes, FsFileOpDelegate::bind< CsPackageLoader, &CsPackageLoader::onChunkHeadersLoaded >( this ) );
    DataPosition_ += Bytes;

    // This callback is complete.
    --PendingCallbackCount_;
}
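//////////////////////////////////////////////////////////////////////////
// Package allocation layout, per the markup above. Each section starts at
// the end of the previous one, rounded up to Header_.MinAlignment_:
//   [ string table     : StringTableBytes_                                   ]
//   [ resource headers : TotalResources_ * sizeof( CsPackageResourceHeader ) ]
//   [ chunk headers    : TotalChunks_ * sizeof( CsPackageChunkHeader )       ]
//   [ chunk data       : TotalChunks_ * sizeof( CsPackageChunkData )         ]
// The whole block is a single BcMemAlign allocation of TotalAllocSize_
// bytes, aligned to MaxAlignment_.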
////////////////////////////////////////////////////////////////////////////////
// main
eSysStateReturn GaMatchmakingState::main()
{
    BcScopedLock< BcMutex > Lock( Lock_ );
    BcReal Delta = SysKernel::pImpl()->getFrameTime();

    switch( HandshakeState_ )
    {
    case HSS_STUN:
        {
            // Only do once.
            if( MappedHandshakeAddr_ == 0 )
            {
                if( doSTUN() )
                {
                    // Hash the mapped address so we don't broadcast it.
                    SysID_ = static_cast< BcU32 >( BcHash( (BcU8*)&MappedHandshakeAddr_, sizeof( MappedHandshakeAddr_ ) ) );
                    if( MappedHandshakeAddr_ != 0 )
                    {
                        HandshakeState_ = HSS_IDLE;
                    }
                }
                else
                {
                    HandshakeState_ = HSS_STUN;
                }
            }
            else
            {
                HandshakeState_ = HSS_IDLE;
            }
        }
        break;

    case HSS_IDLE:
        {
            ConnectTimer_ -= Delta;
            if( ConnectTimer_ < 0.0f )
            {
                if( pSession_ == NULL )
                {
                    pSession_ = irc_create_session( &Callbacks_ );
                }

                if( pSession_ != NULL && !irc_is_connected( pSession_ ) )
                {
                    irc_set_ctx( pSession_, this );

                    std::string Channel = "#testchannel";
                    BcFile File;
                    if( File.open( "config.json" ) )
                    {
                        char* pData = new char[ File.size() ];
                        File.read( pData, File.size() );

                        Json::Reader Reader;
                        Json::Value Root;
                        if( Reader.parse( pData, pData + File.size(), Root ) )
                        {
                            Channel = Root["channel"].asCString();
                        }

                        delete [] pData;
                    }

                    BcSPrintf( ScreenName_, "%s_%x", "PSY", BcRandom::Global.rand() );
                    BcSPrintf( Channel_, Channel.c_str() );

                    // Connect to the server.
                    int RetVal = irc_connect( pSession_, "www.neilo.gd", 8000, NULL, ScreenName_, ScreenName_, ScreenName_ );
                    if( RetVal == 0 )
                    {
                        // Start the thread to tick the client.
                        BcThread::start( "EvtBridgeIRC" );

                        ClientID_ = BcErrorCode;
                        RemoteHandshakeAddr_ = 0;
                        RemoteHandshakePort_ = 0;
                        //LocalHandshakeAddr_ = 0;
                        //LocalHandshakePort_ = 0;
                        //MappedHandshakeAddr_ = 0;
                        //MappedHandshakePort_ = 0;

                        HandshakeState_ = HSS_WAIT_INVITE;
                    }
                    else
                    {
                        BcThread::join();
                        irc_destroy_session( pSession_ );
                        pSession_ = NULL;
                    }
                }
            }
        }
        break;

    case HSS_WAIT_INVITE:
        {
            InviteTimer_ -= Delta;
            if( InviteTimer_ < 0.0f )
            {
                InviteTimer_ = BcAbs( BcRandom::Global.randReal() ) * 5.0f + 5.0f;

                // Send "play with me" message to the channel.
                BcChar PlayBuffer[ 256 ];
                BcSPrintf( PlayBuffer, "REQ:%u", SysID_ );
                irc_cmd_msg( pSession_, Channel_, PlayBuffer );
            }
        }
        break;

    case HSS_WAIT_ADDR:
        {
            HandshakeTimer_ -= Delta;
            if( HandshakeTimer_ < 0.0f )
            {
                HandshakeState_ = HSS_WAIT_INVITE;
            }
        }
        break;

    case HSS_COMPLETE:
        {
            BcPrintf( "GaMatchmakingState: Complete! Our ClientID is %u\n", ClientID_ );
            return sysSR_FINISHED;
        }
        break;
    }

    // If we've dropped the IRC connection outside of the STUN/idle states,
    // tear the session down and retry from idle.
    if( HandshakeState_ != HSS_STUN )
    {
        if( HandshakeState_ != HSS_IDLE && ( pSession_ == NULL || !irc_is_connected( pSession_ ) ) )
        {
            BcSleep( 0.1f );
            BcThread::join();
            BcSleep( 0.1f );
            if( pSession_ != NULL )
            {
                irc_destroy_session( pSession_ );
                pSession_ = NULL;
            }
            HandshakeState_ = HSS_IDLE;
            ConnectTimer_ = 10.0f;
        }
    }

    return sysSR_CONTINUE;
}
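////////////////////////////////////////////////////////////////////////////////
// Handshake flow, as inferred from the state machine above:
//   HSS_STUN --------(doSTUN maps our external address)--> HSS_IDLE
//   HSS_IDLE --------(IRC connect succeeds)--------------> HSS_WAIT_INVITE
//   HSS_WAIT_INVITE -(broadcast "REQ:<SysID>" to the channel periodically,
//                     waiting for a peer to respond)
//   HSS_WAIT_ADDR ---(timeout)---------------------------> HSS_WAIT_INVITE
//   HSS_COMPLETE ----> sysSR_FINISHED
// Any dropped IRC connection outside STUN/idle tears down the session and
// returns to HSS_IDLE with a 10 second reconnect timer.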
//////////////////////////////////////////////////////////////////////////
// save
BcBool CsFileWriter::save()
{
    const BcChar* pType = BcStrStr( Name_.c_str(), "." );
    Header_.ID_ = BcHash( pType );

    if( File_.open( Name_.c_str(), bcFM_WRITE ) )
    {
        // Generate string table.
        BcStream StringTableStream;
        for( BcU32 Idx = 0; Idx < StringList_.size(); ++Idx )
        {
            const std::string& StringEntry( StringList_[ Idx ] );
            StringTableStream.push( StringEntry.c_str(), StringEntry.size() + 1 );
        }

        // Write header.
        Header_.NoofChunks_ = Chunks_.size();
        Header_.StringTableSize_ = StringTableStream.dataSize();
        File_.write( &Header_, sizeof( Header_ ) );

        // Write string table.
        File_.write( StringTableStream.pData(), StringTableStream.dataSize() );

        // Write chunks.
        BcU32 Offset = sizeof( CsFileHeader ) + StringTableStream.dataSize() + ( sizeof( CsFileChunk ) * Chunks_.size() );
        {
            CsFileChunkNativeListIterator Iter = Chunks_.begin();
            while( Iter != Chunks_.end() )
            {
                CsFileChunkNative Chunk = (*Iter);
                CsFileChunk FileChunk;
                FileChunk.ID_ = Chunk.ID_;
                FileChunk.Offset_ = Offset;
                FileChunk.Size_ = Chunk.Size_;
                FileChunk.Hash_ = (BcU32)BcHash( Chunk.pData_, Chunk.Size_ );
                Offset += Chunk.Size_;
                File_.write( &FileChunk, sizeof( FileChunk ) );

                // Next.
                ++Iter;
            }
        }

        // Write data.
        {
            CsFileChunkNativeListIterator Iter = Chunks_.begin();
            while( Iter != Chunks_.end() )
            {
                CsFileChunkNative Chunk = (*Iter);
                File_.write( Chunk.pData_, Chunk.Size_ );

                // Next.
                ++Iter;
            }
        }

        // Done and done.
        File_.close();

        return BcTrue;
    }

    return BcFalse;
}
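//////////////////////////////////////////////////////////////////////////
// On-disk layout produced by save(), per the Offset computation above:
//   [ CsFileHeader ]
//   [ string table: NUL-terminated strings, StringTableSize_ bytes ]
//   [ CsFileChunk table: NoofChunks_ entries ]
//   [ chunk data, in chunk order; each CsFileChunk::Offset_ points here ]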