//////////////////////////////////////////////////////////////////////////
// processOpen
BcBool SysSystem::processOpen()
{
	PSY_LOG( "============================================================================\n" );
	PSY_LOG( "SysSystem (%s @ 0x%p) open:\n", (*Name_).c_str(), this );

	// Pre-open event.
	EvtPublisher::publish( sysEVT_SYSTEM_PRE_OPEN, SysSystemEvent( this ) );

	// Tick open.
	open();

	// Post-open event.
	EvtPublisher::publish( sysEVT_SYSTEM_POST_OPEN, SysSystemEvent( this ) );

	// Advance to update if a stop hasn't been triggered.
	if( StopTriggered_ == BcFalse )
	{
		ProcessState_ = STATE_UPDATE;
	}
	else
	{
		ProcessState_ = STATE_CLOSE;
	}

	return BcTrue;
}
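//////////////////////////////////////////////////////////////////////////
// Illustrative sketch: how the ProcessState_ machine used by
// processOpen()/processClose() might be pumped each tick. The STATE_*
// values and processOpen()/processClose() come from the code in this
// section; processUpdate() and this exact switch layout are assumptions,
// not the engine's verified kernel loop.
BcBool SysSystem::process()
{
	switch( ProcessState_ )
	{
	case STATE_OPEN:
		return processOpen();
	case STATE_UPDATE:
		return processUpdate();
	case STATE_CLOSE:
		return processClose();
	case STATE_FINISHED:
	default:
		// A finished system signals the kernel to remove it.
		return BcFalse;
	}
}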
//////////////////////////////////////////////////////////////////////////
// importResource
BcBool CsPackageImporter::importResource( CsResourceImporterUPtr Importer, const Json::Value& Resource )
{
	// Catch name being missing.
	if( Importer->getResourceName().empty() )
	{
		PSY_LOG( "ERROR: Name not specified for resource.\n" );
		return BcFalse;
	}

	// Catch type being missing.
	if( Importer->getResourceType().empty() )
	{
		PSY_LOG( "ERROR: Type not specified for resource.\n" );
		return BcFalse;
	}

	PSY_LOG( "INFO: Processing \"%s\" of type \"%s\"\n",
		Importer->getResourceName().c_str(),
		Importer->getResourceType().c_str() );

	// Get first chunk used by resource.
	size_t FirstChunk = ChunkHeaders_.size();

	BcBool SuccessfulImport = BcFalse;

	// NOTE: Eventually we will be exception safe throughout the import
	//       pipeline, so we shouldn't need these ad hoc try/catch blocks.
	try
	{
		PSY_LOGSCOPEDINDENT;
		SuccessfulImport = Importer->import( Resource );

		// Check for error + critical messages.
		SuccessfulImport &= Importer->getMessageCount( CsMessageCategory::ERROR ) == 0;
		SuccessfulImport &= Importer->getMessageCount( CsMessageCategory::CRITICAL ) == 0;
	}
	catch( const CsImportException& ImportException )
	{
		PSY_LOG( "ERROR: %s", ImportException.what() );
	}

	// Handle success.
	if( SuccessfulImport )
	{
		// Setup current resource header.
		CurrResourceHeader_.Name_ = addString( Importer->getResourceName().c_str() );
		CurrResourceHeader_.Type_ = addString( Importer->getResourceType().c_str() );
		CurrResourceHeader_.Flags_ = csPEF_DEFAULT;
		CurrResourceHeader_.FirstChunk_ = static_cast< BcU32 >( FirstChunk );
		CurrResourceHeader_.LastChunk_ = static_cast< BcU32 >( ChunkHeaders_.size() - 1 ); // Assumes the import added at least one chunk. Fair assumption.

		// Make sure chunk indices are valid.
		BcAssert( CurrResourceHeader_.FirstChunk_ <= CurrResourceHeader_.LastChunk_ );

		ResourceHeaders_.push_back( CurrResourceHeader_ );
	}

	return SuccessfulImport;
}
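//////////////////////////////////////////////////////////////////////////
// Illustrative sketch: the minimal contract importResource() expects of
// an importer. GaMyImporter and its payload are hypothetical; the
// getResourceName()/getResourceType()/import()/addChunk() calls mirror
// usage seen elsewhere in this section, but the exact base-class
// signatures are assumed.
class GaMyImporter : public CsResourceImporter
{
public:
	BcBool import( const Json::Value& Resource ) override
	{
		// An importer must add at least one chunk, or the
		// FirstChunk_ <= LastChunk_ assertion above will fire.
		const BcU32 Payload = 0;
		CsResourceImporter::addChunk( BcHash( "header" ), &Payload, sizeof( Payload ) );
		return BcTrue;
	}
};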
//////////////////////////////////////////////////////////////////////////
// updateFileMonitoring
void FsCoreImplHTML5::updateFileMonitoring()
{
	std::lock_guard< std::mutex > Lock( FileMonitorLock_ );

	// Check 1 file per update to prevent slowdown.
	if( FileMonitorMapIterator_ == FileMonitorMap_.end() )
	{
		FileMonitorMapIterator_ = FileMonitorMap_.begin();
	}
	else
	{
		// Grab file stats.
		const std::string& FileName = (*FileMonitorMapIterator_).first;
		FsStats& OldFileStats = (*FileMonitorMapIterator_).second;
		FsStats NewFileStats;
		if( fileStats( FileName.c_str(), NewFileStats ) )
		{
			// Compare timestamps.
			if( NewFileStats.ModifiedTime_ != OldFileStats.ModifiedTime_ )
			{
				// Publish message that file has changed/been created.
				if( OldFileStats.ModifiedTime_.isNull() == BcTrue )
				{
					PSY_LOG( "FsCoreImplHTML5: File created: %s\n", FileName.c_str() );
					EvtPublisher::publish( fsEVT_MONITOR_CREATED, FsEventMonitor( FileName.c_str(), OldFileStats, NewFileStats ) );
				}
				else
				{
					PSY_LOG( "FsCoreImplHTML5: File modified: %s\n", FileName.c_str() );
					EvtPublisher::publish( fsEVT_MONITOR_MODIFIED, FsEventMonitor( FileName.c_str(), OldFileStats, NewFileStats ) );
				}
			}
		}
		else
		{
			// Publish message that file has been deleted.
			if( OldFileStats.ModifiedTime_.isNull() == BcFalse )
			{
				PSY_LOG( "FsCoreImplHTML5: File deleted: %s\n", FileName.c_str() );
				EvtPublisher::publish( fsEVT_MONITOR_DELETED, FsEventMonitor( FileName.c_str(), OldFileStats, NewFileStats ) );
			}
		}

		// Store new stats.
		OldFileStats = NewFileStats;

		// Advance to next file.
		++FileMonitorMapIterator_;
	}
}
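//////////////////////////////////////////////////////////////////////////
// Illustrative sketch of the round-robin polling pattern above in plain
// C++: one entry is checked per tick, so a large watch list never stalls
// a frame. FileMonitor and Stats are standalone stand-ins for this
// sketch, not engine types.
#include <map>
#include <string>

struct Stats { long long ModifiedTime_ = 0; };

class FileMonitor
{
public:
	void addFile( std::string Name ) { Files_[ std::move( Name ) ] = Stats{}; }

	// Called once per frame; advances through the map one entry at a time.
	void tick()
	{
		if( It_ == Files_.end() )
		{
			// Wrap around for the next pass over the watch list.
			It_ = Files_.begin();
		}
		else
		{
			// ...stat It_->first, compare with It_->second, publish
			// created/modified/deleted events, then store the new stats...
			++It_;
		}
	}

private:
	std::map< std::string, Stats > Files_;
	std::map< std::string, Stats >::iterator It_ = Files_.end();
};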
//////////////////////////////////////////////////////////////////////////
// BcMessageBox
BcMessageBoxReturn BcMessageBox( const BcChar* pTitle, const BcChar* pMessage, BcMessageBoxType Type, BcMessageBoxIcon Icon )
{
	// Log.
	PSY_LOG( "%s: %s\n", pTitle, pMessage );

	return bcMBR_OK; // bcMBR_CANCEL
}
//////////////////////////////////////////////////////////////////////////
// loadJsonFile
BcBool CsPackageImporter::loadJsonFile( const BcChar* pFileName, Json::Value& Root )
{
	BcBool Success = BcFalse;
	BcFile File;
	if( File.open( pFileName ) )
	{
		const BcU8* pData = File.readAllBytes();

		Json::Reader Reader;
		if( Reader.parse( (const char*)pData, (const char*)pData + File.size(), Root ) )
		{
			Success = BcTrue;
		}
		else
		{
			PSY_LOG( "Failed to parse Json:\n %s\n", Reader.getFormatedErrorMessages().c_str() );
			BcAssertMsg( BcFalse, "Failed to parse \"%s\", see log for more details.", pFileName );
		}

		BcMemFree( (void*)pData );
	}
	else
	{
		BcAssertMsg( BcFalse, "Failed to load \"%s\"", pFileName );
	}

	return Success;
}
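//////////////////////////////////////////////////////////////////////////
// Illustrative sketch: the same legacy jsoncpp (Json::Reader) parse call
// on a plain std::string buffer, outside the engine's BcFile wrapper.
// Assumes the pre-CharReader jsoncpp API used throughout this codebase;
// parseJsonString() itself is hypothetical.
#include <json/json.h>
#include <cstdio>
#include <string>

bool parseJsonString( const std::string& Text, Json::Value& Root )
{
	Json::Reader Reader;
	if( !Reader.parse( Text.data(), Text.data() + Text.size(), Root ) )
	{
		// getFormatedErrorMessages() is the historically misspelled legacy
		// accessor; newer jsoncpp deprecates it in favour of
		// getFormattedErrorMessages().
		printf( "Parse failed:\n%s\n", Reader.getFormatedErrorMessages().c_str() );
		return false;
	}
	return true;
}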
//////////////////////////////////////////////////////////////////////////
// open
//virtual
void OsCoreImplSDL::open()
{
	// SDL_GetError() returns a const char*, so log it with %s.
	if( SDL_Init( SDL_INIT_EVERYTHING ) != 0 )
	{
		PSY_LOG( "SDL_Init Error: %s\n", SDL_GetError() );
		BcBreakpoint;
	}
}
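//////////////////////////////////////////////////////////////////////////
// Illustrative sketch: SDL_INIT_EVERYTHING pulls in every subsystem; a
// build that only needs video and input could initialise selectively
// instead. Standalone SDL2 usage, not engine code; initSdlMinimal() is
// hypothetical.
#include <SDL.h>
#include <cstdio>

bool initSdlMinimal()
{
	// Only the subsystems we actually use.
	if( SDL_Init( SDL_INIT_VIDEO | SDL_INIT_EVENTS | SDL_INIT_GAMECONTROLLER ) != 0 )
	{
		printf( "SDL_Init Error: %s\n", SDL_GetError() );
		return false;
	}
	return true;
}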
//////////////////////////////////////////////////////////////////////////
// setupTopology
void GaStructureComponent::setupTopology()
{
	// Setup physics.
	std::vector< GaPhysicsPointMass > PointMasses;
	std::vector< GaPhysicsConstraint > Constraints;
	PointMasses.reserve( 4 );
	Constraints.reserve( 6 );

	// TODO: Calculate this *properly* instead of drunkenly guessing?
	const BcF32 Size = 64.0f;
	const BcF32 PointMass = 1.0f;
	MaVec2d Offsets[3] =
	{
		MaVec2d( 0.0f, -1.0f ) * Size * 0.5f,
		MaVec2d( -0.9f, 0.5f ) * Size * 0.5f,
		MaVec2d( 0.9f, 0.5f ) * Size * 0.5f,
	};

	/*
	if( StructureType_ == GaStructureType::BASE )
	{
		Offsets[0] = MaVec2d( 0.0f, -1.0f ) * Size * 0.5f;
		Offsets[1] = MaVec2d( -0.9f, 1.5f ) * Size * 0.5f;
		Offsets[2] = MaVec2d( 0.9f, 1.5f ) * Size * 0.5f;
	}
	*/

	MaVec2d Position = getParentEntity()->getWorldPosition().xy();

	// Central point + external constraints.
	PointMasses.emplace_back( GaPhysicsPointMass( Position, 0.05f, 1.0f / PointMass ) );
	Constraints.emplace_back( GaPhysicsConstraint( 0, 1, -1.0f, 0.1f ) );
	Constraints.emplace_back( GaPhysicsConstraint( 0, 2, -1.0f, 0.1f ) );
	Constraints.emplace_back( GaPhysicsConstraint( 0, 3, -1.0f, 0.1f ) );

	// Outer edges.
	for( size_t Idx = 0; Idx < 3; ++Idx )
	{
		const MaVec2d Offset( Offsets[ Idx ] );
		PointMasses.emplace_back( GaPhysicsPointMass( Position + Offset, 0.01f, 1.0f / PointMass ) );
		Constraints.emplace_back( GaPhysicsConstraint( 1 + Idx, 1 + ( ( Idx + 1 ) % 3 ), -1.0f, 1.0f ) );
	}

	WeightedPoints_.push_back( 1 );
	BouyantPoints_.push_back( 2 );
	BouyantPoints_.push_back( 3 );

	Physics_ = getParentEntity()->getComponentByType< GaPhysicsComponent >();
	BcAssert( Physics_ );
	Physics_->setup( std::move( PointMasses ), std::move( Constraints ) );

	// Grab absolute position.
	AbsolutePosition_ = getParentEntity()->getWorldPosition().xy();

	PSY_LOG( "GaStructureComponent::setupTopology: %f, %f", AbsolutePosition_.x(), AbsolutePosition_.y() );
}
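//////////////////////////////////////////////////////////////////////////
// Illustrative sketch: a generic point-mass distance-constraint
// relaxation step of the kind the topology above feeds into
// GaPhysicsComponent. The component's internals aren't shown here, so
// this is a standalone approximation with stand-in types; interpreting a
// -1.0f rest length as "capture the current distance" is an assumption.
#include <vector>
#include <cmath>

struct Point { float X, Y, InvMass; };
struct Constraint { size_t A, B; float RestLength, Rigidity; };

void relax( std::vector< Point >& Points, const std::vector< Constraint >& Constraints )
{
	for( const auto& C : Constraints )
	{
		Point& A = Points[ C.A ];
		Point& B = Points[ C.B ];
		const float DX = B.X - A.X;
		const float DY = B.Y - A.Y;
		const float Dist = std::sqrt( DX * DX + DY * DY );
		if( Dist <= 0.0f )
		{
			continue;
		}
		// Move each end towards the rest length, weighted by inverse mass
		// and scaled by the constraint's rigidity.
		const float Diff = ( Dist - C.RestLength ) / ( Dist * ( A.InvMass + B.InvMass ) );
		A.X += DX * Diff * A.InvMass * C.Rigidity;
		A.Y += DY * Diff * A.InvMass * C.Rigidity;
		B.X -= DX * Diff * B.InvMass * C.Rigidity;
		B.Y -= DY * Diff * B.InvMass * C.Rigidity;
	}
}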
//////////////////////////////////////////////////////////////////////////
// processClose
BcBool SysSystem::processClose()
{
	PSY_LOG( "============================================================================\n" );
	PSY_LOG( "SysSystem (%s @ 0x%p) close:\n", (*Name_).c_str(), this );

	// Pre-close event.
	EvtPublisher::publish( sysEVT_SYSTEM_PRE_CLOSE, SysSystemEvent( this ) );

	// Tick close.
	close();

	// Post-close event.
	EvtPublisher::publish( sysEVT_SYSTEM_POST_CLOSE, SysSystemEvent( this ) );

	// Advance to finished.
	ProcessState_ = STATE_FINISHED;

	return BcTrue;
}
//////////////////////////////////////////////////////////////////////////
// addAllPackageCrossRefs
void CsPackageImporter::addAllPackageCrossRefs( Json::Value& Root )
{
	std::lock_guard< std::recursive_mutex > Lock( BuildingLock_ );
	BcAssert( BuildingBeginCount_ > 0 );

	// If it's a string value, attempt to match it.
	if( Root.type() == Json::stringValue )
	{
		std::cmatch Match;
		std::regex_match( Root.asCString(), Match, GRegex_ResourceReference );

		// Try the weak match.
		// TODO: Merge into regex.
		if( Match.size() == 0 )
		{
			std::regex_match( Root.asCString(), Match, GRegex_WeakResourceReference );
		}

		if( Match.size() == 4 )
		{
			BcU32 RefIndex = addPackageCrossRef( Root.asCString() );

			// If we find it, replace string reference with a cross ref index.
			if( RefIndex != BcErrorCode )
			{
				PSY_LOG( "Adding crossref %u: %s\n", RefIndex, Root.asCString() );
				Root = Json::Value( RefIndex );
			}
		}
	}
	else if( Root.type() == Json::arrayValue )
	{
		for( BcU32 Idx = 0; Idx < Root.size(); ++Idx )
		{
			addAllPackageCrossRefs( Root[ Idx ] );
		}
	}
	else if( Root.type() == Json::objectValue )
	{
		Json::Value::Members MemberValues = Root.getMemberNames();

		for( BcU32 Idx = 0; Idx < MemberValues.size(); ++Idx )
		{
			addAllPackageCrossRefs( Root[ MemberValues[ Idx ] ] );
		}
	}
}
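//////////////////////////////////////////////////////////////////////////
// Illustrative sketch: a regex yielding four match results (the full
// match plus three capture groups), which is the shape the
// Match.size() == 4 test above looks for. The exact patterns behind
// GRegex_ResourceReference/GRegex_WeakResourceReference aren't shown, so
// this "$(package:type.name)" pattern is an assumption for illustration.
#include <regex>
#include <cstdio>

void matchReferenceExample()
{
	static const std::regex RefRegex( "^\\$\\((.+?):(.+?)\\.(.+?)\\)$" );
	std::cmatch Match;
	if( std::regex_match( "$(engine:ScnTexture.default)", Match, RefRegex ) )
	{
		// Match[0] is the whole string; [1], [2], [3] are the groups.
		printf( "package=%s type=%s name=%s\n",
			Match[ 1 ].str().c_str(),
			Match[ 2 ].str().c_str(),
			Match[ 3 ].str().c_str() );
	}
}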
//////////////////////////////////////////////////////////////////////////
// import
BcBool ScnAnimationImport::import( const Json::Value& )
{
#if PSY_IMPORT_PIPELINE
	if( Source_.empty() )
	{
		PSY_LOG( "ERROR: Missing 'source' field.\n" );
		return BcFalse;
	}

	CsResourceImporter::addDependency( Source_.c_str() );

	auto PropertyStore = aiCreatePropertyStore();
	aiLogStream AssimpLogger = { AssimpLogStream, (char*)this };
	aiAttachLogStream( &AssimpLogger );

	Scene_ = aiImportFileExWithProperties( Source_.c_str(), 0, nullptr, PropertyStore );
	aiReleasePropertyStore( PropertyStore );

	if( Scene_ != nullptr )
	{
		PSY_LOG( "Found %u animations:\n", Scene_->mNumAnimations );
		for( int Idx = 0; Idx < (int)Scene_->mNumAnimations; ++Idx )
		{
			PSY_LOG( " - %s\n", Scene_->mAnimations[ Idx ]->mName.C_Str() );
		}

		// Build animated nodes list. Need this to calculate relative transforms later.
		recursiveParseAnimatedNodes( Scene_->mRootNode, BcErrorCode );

		// Pack down animation into useful internal format.
		BcAssert( Scene_->mNumAnimations == 1 );
		for( BcU32 AnimationIdx = 0; AnimationIdx < 1; ++AnimationIdx )
		{
			auto* Animation = Scene_->mAnimations[ AnimationIdx ];
			BcF32 Rate = 1.0f;
			BcU32 Duration = static_cast< BcU32 >( Animation->mDuration / Rate );

			// Setup data streams.
			ScnAnimationHeader Header;
			Header.NoofNodes_ = Animation->mNumChannels;
			Header.NoofPoses_ = Duration;
			Header.Flags_ = scnAF_DEFAULT;
			Header.Packing_ = scnAP_R16S16T16; // TODO: Make this configurable when we factor out into another class.
			HeaderStream_ << Header;

			// Animation node file data.
			ScnAnimationNodeFileData NodeFileData;
			for( BcU32 NodeIdx = 0; NodeIdx < Animation->mNumChannels; ++NodeIdx )
			{
				auto* Channel = Animation->mChannels[ NodeIdx ];
				NodeFileData.Name_ = CsResourceImporter::addString( Channel->mNodeName.C_Str() );
				NodeStream_ << NodeFileData;
			}

			// Calculate output pose.
			for( BcF32 Time = 0.0f; Time <= Animation->mDuration; Time += Rate )
			{
				ScnAnimationPoseFileData Pose;
				Pose.Time_ = Time / FrameRate_;
				Pose.KeyDataOffset_ = static_cast< BcU32 >( KeyStream_.dataSize() );

				// Iterate over all node channels to generate keys.
				for( BcU32 ChannelIdx = 0; ChannelIdx < Animation->mNumChannels; ++ChannelIdx )
				{
					auto* Channel = Animation->mChannels[ ChannelIdx ];
					auto& AnimatedNode = findAnimatedNode( Channel->mNodeName.C_Str() );

					aiVector3D OutPositionKey;
					aiVector3D OutScaleKey;
					aiQuaternion OutRotationKey;

					// Extract position.
					GetKeyNodeAnim( Channel->mPositionKeys, Channel->mNumPositionKeys, Time, BcTrue, OutPositionKey );

					// Extract scale.
					GetKeyNodeAnim( Channel->mScalingKeys, Channel->mNumScalingKeys, Time, BcTrue, OutScaleKey );

					// Extract rotation.
					GetKeyNodeAnim( Channel->mRotationKeys, Channel->mNumRotationKeys, Time, BcTrue, OutRotationKey );

					// Combine key into transform.
					ScnAnimationTransform Transform;
					Transform.R_ = MaQuat( OutRotationKey.x, OutRotationKey.y, OutRotationKey.z, OutRotationKey.w );
					Transform.S_ = MaVec3d( OutScaleKey.x, OutScaleKey.y, OutScaleKey.z );
					Transform.T_ = MaVec3d( OutPositionKey.x, OutPositionKey.y, OutPositionKey.z );

					// Store as local matrix.
					Transform.toMatrix( AnimatedNode.LocalTransform_ );
				}

				// Concatenate each node's local matrix with its parent's
				// to produce its world matrix.
				for( auto& AnimatedNode : AnimatedNodes_ )
				{
					if( AnimatedNode.ParentIdx_ != BcErrorCode )
					{
						auto& ParentAnimatedNode( AnimatedNodes_[ AnimatedNode.ParentIdx_ ] );
						MaMat4d ParentLocal = ParentAnimatedNode.LocalTransform_;
						AnimatedNode.WorldTransform_ = ParentLocal * AnimatedNode.LocalTransform_;
					}
					else
					{
						AnimatedNode.WorldTransform_ = AnimatedNode.LocalTransform_;
					}
				}

				// Write out pose keys.
				ScnAnimationTransformKey_R16S16T16 OutKey;
				for( BcU32 ChannelIdx = 0; ChannelIdx < Animation->mNumChannels; ++ChannelIdx )
				{
					auto* Channel = Animation->mChannels[ ChannelIdx ];
					const auto& AnimatedNode = findAnimatedNode( Channel->mNodeName.C_Str() );

					// Extract individual transform elements.
					ScnAnimationTransform Transform;
					Transform.fromMatrix( AnimatedNode.LocalTransform_ );

					// Pack into output key.
					OutKey.pack( Transform.R_, Transform.S_, Transform.T_ );
					KeyStream_ << OutKey;
				}

				// Final size + CRC.
				Pose.KeyDataSize_ = static_cast< BcU32 >( KeyStream_.dataSize() - Pose.KeyDataOffset_ );
				Pose.CRC_ = BcHash::GenerateCRC32( 0, KeyStream_.pData() + Pose.KeyDataOffset_, Pose.KeyDataSize_ );

				// Write out pose.
				PoseStream_ << Pose;
			}

			// Write out chunks.
			CsResourceImporter::addChunk( BcHash( "header" ), HeaderStream_.pData(), HeaderStream_.dataSize(), 16, csPCF_IN_PLACE );
			CsResourceImporter::addChunk( BcHash( "nodes" ), NodeStream_.pData(), NodeStream_.dataSize() );
			CsResourceImporter::addChunk( BcHash( "poses" ), PoseStream_.pData(), PoseStream_.dataSize() );
			CsResourceImporter::addChunk( BcHash( "keys" ), KeyStream_.pData(), KeyStream_.dataSize() );
		}

		aiReleaseImport( Scene_ );
		Scene_ = nullptr;

		return BcTrue;
	}
#endif // PSY_IMPORT_PIPELINE
	return BcFalse;
}
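//////////////////////////////////////////////////////////////////////////
// Illustrative sketch: one plausible shape for the GetKeyNodeAnim()
// helper called above, picking the last key at or before a given time.
// The real helper isn't shown, so everything beyond the call sites is an
// assumption: the template signature, and reading the BcTrue flag as
// "clamp to the key range". Assimp's key types (aiVectorKey/aiQuatKey)
// do store a time stamp in mTime and the sampled value in mValue.
template< typename TKey, typename TValue >
void GetKeyNodeAnim( const TKey* Keys, unsigned int NumKeys, float Time, bool Clamp, TValue& OutValue )
{
	if( NumKeys == 0 )
	{
		return;
	}

	// Keys are sorted by mTime; walk to the last key at or before Time.
	unsigned int Idx = 0;
	while( Idx + 1 < NumKeys && Keys[ Idx + 1 ].mTime <= Time )
	{
		++Idx;
	}

	// Without clamping, times past the final key leave OutValue untouched.
	if( !Clamp && Time > Keys[ NumKeys - 1 ].mTime )
	{
		return;
	}

	// A full implementation would interpolate between Idx and Idx + 1.
	OutValue = Keys[ Idx ].mValue;
}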
//////////////////////////////////////////////////////////////////////////
// BcMessageBox
BcMessageBoxReturn BcMessageBox( const BcChar* pTitle, const BcChar* pMessage, BcMessageBoxType Type, BcMessageBoxIcon Icon )
{
	UINT MBType = MB_TASKMODAL | MB_SETFOREGROUND | MB_TOPMOST;

	switch( Type )
	{
	case bcMBT_OK:
		MBType |= MB_OK;
		break;
	case bcMBT_OKCANCEL:
		MBType |= MB_OKCANCEL;
		break;
	case bcMBT_YESNO:
		MBType |= MB_YESNO;
		break;
	case bcMBT_YESNOCANCEL:
		MBType |= MB_YESNOCANCEL;
		break;
	}

	switch( Icon )
	{
	case bcMBI_WARNING:
		MBType |= MB_ICONWARNING;
		break;
	case bcMBI_ERROR:
		MBType |= MB_ICONERROR;
		break;
	case bcMBI_QUESTION:
		MBType |= MB_ICONQUESTION;
		break;
	default:
		MBType |= MB_ICONWARNING;
		break;
	}

	// Log.
	PSY_LOG( "%s: %s\n", pTitle, pMessage );

	// TODO: HWND!
	int RetVal = ::MessageBoxA( NULL, pMessage, pTitle, MBType );
	switch( RetVal )
	{
	case IDOK:
		return bcMBR_OK;
	case IDYES:
		return bcMBR_YES;
	case IDNO:
		return bcMBR_NO;
	case IDCANCEL:
		return bcMBR_CANCEL;
	default:
		break;
	}

	return bcMBR_OK;
}
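//////////////////////////////////////////////////////////////////////////
// Illustrative usage of BcMessageBox with the enum values handled above.
// confirmOverwriteExample() and its scenario text are hypothetical.
void confirmOverwriteExample()
{
	const BcMessageBoxReturn Ret = BcMessageBox(
		"Overwrite?", "The package is already packed. Overwrite it?",
		bcMBT_YESNOCANCEL, bcMBI_QUESTION );
	if( Ret == bcMBR_YES )
	{
		// ...proceed with the overwrite...
	}
}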
//////////////////////////////////////////////////////////////////////////
// serialiseDict
//virtual
Json::Value SeJsonWriter::serialiseDict( void* pData, const ReField* pField, BcU32 ParentFlags )
{
	Json::Value DictValue( Json::objectValue );

	auto pFieldValueType = pField->getValueType();
	auto pFieldKeyType = pField->getKeyType();
	auto ValueSerialiser = pFieldValueType->getTypeSerialiser();
	auto KeySerialiser = pFieldKeyType->getTypeSerialiser();

	// Run a check to make sure the key is not a simple deref field.
	if( ( pField->getKeyFlags() & bcRFF_SIMPLE_DEREF ) != 0 )
	{
		BcAssert( false );
		return Json::nullValue;
	}

	// Early out if we can't serialise.
	if( KeySerialiser == nullptr )
	{
		PSY_LOG( "SeJsonWriter: Unable to serialise for key \"%s\"\n", ( *pFieldKeyType->getName() ).c_str() );
		return Json::nullValue;
	}

	if( ValueSerialiser == nullptr )
	{
		PSY_LOG( "SeJsonWriter: Unable to serialise for value \"%s\"\n", ( *pFieldValueType->getName() ).c_str() );
		return Json::nullValue;
	}

	// Create the read iterator only once we know we can serialise, so the
	// early outs above don't leak it.
	auto pReadIterator = pField->newReadIterator( pField->getData< void >( pData ) );

	std::string OutKeyString;

	// Iterate over values and serialise individually.
	// NOTE: Json only supports strings as keys, therefore
	//       if serialising to Json, we must only support strings too.
	//       We could support any object, but it's unlikely we will
	//       use anything that isn't serialisable to strings.
	while( pReadIterator->isValid() )
	{
		void* pValueData = pReadIterator->getValue();
		void* pKeyData = pReadIterator->getKey();

		if( KeySerialiser->serialiseToString( pKeyData, OutKeyString ) )
		{
			Json::Value ClassValue;
			if( ( pField->getValueFlags() & bcRFF_SIMPLE_DEREF ) == 0 )
			{
				ClassValue = serialiseClass( pValueData, static_cast< const ReClass* >( pFieldValueType ), ParentFlags, true ); // TODO: Only if pointer type.
			}
			else
			{
				void* pPointerValueData = *reinterpret_cast< void** >( pValueData );
				ClassValue = serialisePointer( pPointerValueData, static_cast< const ReClass* >( pFieldValueType ), ParentFlags );
			}
			DictValue[ OutKeyString ] = ClassValue;
		}
		else
		{
			// This should never be hit. It means we're using an invalid key type.
			BcAssert( false );
		}

		pReadIterator->next();
	}

	delete pReadIterator;

	return DictValue;
}
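//////////////////////////////////////////////////////////////////////////
// Illustrative sketch of the constraint noted above: JSON objects only
// accept string keys, so any dict key type must first serialise to a
// string. Standalone jsoncpp usage, not engine code; writeIntKeyedMap()
// is hypothetical.
#include <json/json.h>
#include <map>
#include <string>

Json::Value writeIntKeyedMap( const std::map< int, std::string >& Map )
{
	Json::Value Out( Json::objectValue );
	for( const auto& Pair : Map )
	{
		// The int key must become a string to index a Json::Value object.
		Out[ std::to_string( Pair.first ) ] = Pair.second;
	}
	return Out;
}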
//////////////////////////////////////////////////////////////////////////
// import
BcBool CsPackageImporter::import( const BcName& Name )
{
	Name_ = Name;
	BcPath Path = CsCore::pImpl()->getPackageImportPath( Name );
	PSY_LOGSCOPEDCATEGORY( "Import" );
	PSY_LOG( "Importing %s...\n", (*Path).c_str() );
	PSY_LOGSCOPEDINDENT;

	BcTimer TotalTimer;
	TotalTimer.mark();

	// Store source file info.
	FsStats Stats;
	if( FsCore::pImpl()->fileStats( (*Path).c_str(), Stats ) )
	{
		Header_.SourceFileStatsHash_ = BcHash( reinterpret_cast< BcU8* >( &Stats ), sizeof( Stats ) );
	}
	else
	{
		Header_.SourceFileStatsHash_ = 0;
	}

	beginImport();
	Header_.SourceFile_ = addString( (*Path).c_str() );
	endImport();

	Json::Value Root;
	if( loadJsonFile( (*Path).c_str(), Root ) )
	{
		// Add as dependency.
		beginImport();
		addDependency( (*Path).c_str() );

		// Get resource list.
		Json::Value Resources( Root.get( "resources", Json::Value( Json::arrayValue ) ) );

		// Add all package cross refs.
		addAllPackageCrossRefs( Resources );

		// Set resource id to zero.
		ResourceIds_.store( 0 );

		// Import everything.
		for( const auto& ResourceObject : Resources )
		{
			addImport( ResourceObject, BcFalse );
		}
		endImport();

		// Sort importers.
		std::sort( Resources_.begin(), Resources_.end() );

		// Iterate over all resources and import (import calls can append to the list).
		size_t CurrResourceIdx = 0;
		while( CurrResourceIdx < Resources_.size() )
		{
			// Grab the next resource in the list.
			auto ResourceEntry = std::move( Resources_[ CurrResourceIdx++ ] );

			// Import resource.
			BcTimer ResourceTimer;
			ResourceTimer.mark();
			try
			{
				PSY_LOGSCOPEDINDENT;
				beginImport();
				if( importResource( std::move( ResourceEntry.Importer_ ), ResourceEntry.Resource_ ) )
				{
					PSY_LOG( "SUCCEEDED: Time: %.2f seconds.\n", ResourceTimer.time() );
				}
				else
				{
					PSY_LOG( "FAILED: Time: %.2f seconds.\n", ResourceTimer.time() );
					BcBreakpoint;
					endImport();
					return BcFalse;
				}
				endImport();
			}
			catch( const CsImportException& ImportException )
			{
				PSY_LOG( "FAILED: Time: %.2f seconds.\n", ResourceTimer.time() );
				PSY_LOG( "ERROR: in file %s:\n%s\n", ImportException.file().c_str(), ImportException.what() );
				endImport();
				return BcFalse;
			}
		}

		// Save and return.
		BcPath PackedPackage( CsCore::pImpl()->getPackagePackedPath( Name ) );
		BcBool SaveSuccess = save( PackedPackage );

		if( SaveSuccess )
		{
			PSY_LOG( "SUCCEEDED: Time: %.2f seconds.\n", TotalTimer.time() );

			// Write out dependencies.
			std::string OutputDependencies = *CsCore::pImpl()->getPackageIntermediatePath( Name ) + "/deps.json";
			CsSerialiserPackageObjectCodec ObjectCodec( nullptr, (BcU32)bcRFF_ALL, (BcU32)bcRFF_TRANSIENT, 0 );
			SeJsonWriter Writer( &ObjectCodec );
			Writer << Dependencies_;
			Writer.save( OutputDependencies.c_str() );
		}
		else
		{
			PSY_LOG( "FAILED: Time: %.2f seconds.\n", TotalTimer.time() );
			BcBreakpoint;
		}

		return SaveSuccess;
	}

	return BcFalse;
}
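//////////////////////////////////////////////////////////////////////////
// Illustrative sketch: how the SourceFileStatsHash_ written above could
// be used to skip a re-import when the source package file is unchanged.
// shouldReimport() is hypothetical; only the hash construction mirrors
// the code above, and BcHash is assumed to convert to BcU32 as the
// Header_ assignment implies.
BcBool shouldReimport( const BcChar* pPath, BcU32 StoredStatsHash )
{
	FsStats Stats;
	if( FsCore::pImpl()->fileStats( pPath, Stats ) )
	{
		// Same stats hash as at import time means the source is unchanged.
		const BcU32 NewHash = BcHash( reinterpret_cast< BcU8* >( &Stats ), sizeof( Stats ) );
		return NewHash != StoredStatsHash;
	}

	// Missing source file: nothing to re-import from.
	return BcFalse;
}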