/* Reads the observer and sighting input files, then prints the report
 * header followed by every sighting record that is flagged for display. */
void featureOne() {
    /* Load both input files; the second argument tags the file type. */
    FileReader("observers", 0);
    FileReader("sightings", 1);

    printf("\n===============================================================");
    printf("\n%02d / %02d / %04d AT %02d %02d %02d\n", observerTS->day, observerTS->month, observerTS->year, observerTS->hour, observerTS->min, observerTS->second);

    /* Debug helper kept for reference: walks and prints the observer list.
    printf("Printing observer list\n");
    struct observer *x = observerHead;
    while(x!=NULL){ printf("Observer: %-4s Lat: %f Long: %f\n",x->observerID,x->latit,x->longi); x=x->next; }
    */

    printf("Latitude Longitude Observer ID Type\n");

    /* Walk the sightings list; display == 0 marks a record inside the sea area. */
    struct sighting *node;
    for (node = sightingsHead; node != NULL; node = node->next) {
        if (node->display == 0) {
            printf("%lf %lf %-4s %-1s\n", node->mammalLoc.lat, node->mammalLoc.lng, node->observerID, node->type);
        }
    }

    printf("\nAny mammals outside of the sea area are not displayed\n");
    continueProgram("\nshall we analyse this data to find the duplicate entries? Y/N\n");
}
bool USubstanceImageInput::CanEditChange(const UProperty* InProperty) const { if (SourceFilePath.EndsWith(".tga")) { if (InProperty->GetNameCPP() == TEXT("CompressionAlpha")) { if (ImageA.GetBulkDataSize() == 0) { return false; } } // check the image input is still available before allowing modification of compression level TUniquePtr<FArchive> FileReader(IFileManager::Get().CreateFileReader(*SourceFilePath)); if (!FileReader) { return false; } return true; } else { return false; } }
void FPendingReports::Load() { TUniquePtr<FArchive> FileReader(IFileManager::Get().CreateFileReader(*GetPendingReportsJsonFilepath())); if (!FileReader) { return; } TSharedPtr<FJsonObject> JsonRootObject; if (!FJsonSerializer::Deserialize(TJsonReader<>::Create(FileReader.Get()), JsonRootObject)) { return; } // Array will be empty if there's a type mismatch auto ReportArray = JsonRootObject->GetArrayField(ReportsArrayFieldName); for (auto PathValue: ReportArray) { auto Path = PathValue->AsString(); if (!Path.IsEmpty()) { Reports.Add(Path); } } }
// Opens a RIFF/WAVE file, parses its chunks, and loads 16-bit PCM data.
// Returns false on open failure, malformed headers, or an unsupported
// format (only uncompressed PCM, 1-2 channels, 16 bits per sample).
// NOTE(review): assumes the "fmt " chunk precedes the "data" chunk; if
// "data" came first, m_format would be validated uninitialized — confirm
// all inputs are well-ordered WAV files.
bool Init(const String& path)
{
	File file;
	if(!file.OpenRead(path))
		return false;
	FileReader stream = FileReader(file);

	// Top-level container must be a RIFF of type WAVE.
	WavHeader riff;
	stream << riff;
	if(riff != "RIFF")
		return false;

	char riffType[4];
	stream.SerializeObject(riffType);
	if(strncmp(riffType, "WAVE", 4) != 0)
		return false;

	// Walk the chunk list until the end of the file.
	while(stream.Tell() < stream.GetSize())
	{
		WavHeader chunkHdr;
		stream << chunkHdr;
		if(chunkHdr == "fmt ")
		{
			stream << m_format;
			//Logf("Sample format: %s", Logger::Info, (m_format.nFormat == 1) ? "PCM" : "Unknown");
			//Logf("Channels: %d", Logger::Info, m_format.nChannels);
			//Logf("Sample rate: %d", Logger::Info, m_format.nSampleRate);
			//Logf("Bps: %d", Logger::Info, m_format.nBitsPerSample);
		}
		else if(chunkHdr == "data") // data Chunk
		{
			// validate header
			if(m_format.nFormat != 1)
				return false;
			if(m_format.nChannels > 2 || m_format.nChannels == 0)
				return false;
			if(m_format.nBitsPerSample != 16)
				return false;

			// Read data
			// m_length counts 16-bit samples; nLength is the chunk size in bytes.
			m_length = chunkHdr.nLength / sizeof(short);
			// NOTE(review): resize is called with a BYTE count — if m_pcm's
			// element type is short this allocates twice the needed storage;
			// confirm the element type of m_pcm.
			m_pcm.resize(chunkHdr.nLength);
			stream.Serialize(m_pcm.data(), chunkHdr.nLength);
		}
		else
		{
			// Unknown chunk: skip its payload entirely.
			stream.Skip(chunkHdr.nLength);
		}
	}

	// Calculate the sample step if the rate is not the same as the output rate
	double sampleStep = (double)m_format.nSampleRate / (double)m_audio->GetSampleRate();
	// Store as fixed-point increment (fp_sampleStep is the fixed-point scale).
	m_sampleStepIncrement = (uint64)(sampleStep * (double)fp_sampleStep);

	return true;
}
/**
 * Reads the world tile info stored in a package file without loading the package.
 * @param InPackageFileName  path of the package file to inspect
 * @param OutInfo            receives the tile info (reset to defaults first)
 * @return true when the file is a readable, recognizable package (OutInfo stays
 *         defaulted if the package stores no tile info); false otherwise
 */
bool FWorldTileInfo::Read(const FString& InPackageFileName, FWorldTileInfo& OutInfo)
{
	// Fill with default information
	OutInfo = FWorldTileInfo();

	// Create a file reader to load the file
	TScopedPointer<FArchive> FileReader(IFileManager::Get().CreateFileReader(*InPackageFileName));
	if (FileReader == NULL)
	{
		// Couldn't open the file
		return false;
	}

	// Read package file summary from the file
	FPackageFileSummary FileSummary;
	(*FileReader) << FileSummary;

	// Make sure this is indeed a package
	if (FileSummary.Tag != PACKAGE_FILE_TAG)
	{
		// Unrecognized or malformed package file
		return false;
	}

	// Does the package contains a level info?
	if (FileSummary.WorldTileInfoDataOffset != 0)
	{
		if (!!(FileSummary.PackageFlags & PKG_StoreCompressed))
		{
			check(FileSummary.CompressedChunks.Num() > 0);

			// If the plain reader cannot accept the compression map, fall back
			// to an async archive; re-assigning the scoped pointer deletes the
			// old reader, and the map must apply on the replacement.
			if (!FileReader->SetCompressionMap(&FileSummary.CompressedChunks, (ECompressionFlags)FileSummary.CompressionFlags))
			{
				FileReader = new FArchiveAsync(*InPackageFileName); // re-assign scope pointer
				check(!FileReader->IsError());
				verify(FileReader->SetCompressionMap(&FileSummary.CompressedChunks, (ECompressionFlags)FileSummary.CompressionFlags));
			}
		}

		// Seek the the part of the file where the structure lives
		FileReader->Seek(FileSummary.WorldTileInfoDataOffset);

		//make sure the filereader gets the correct version number (it defaults to latest version)
		FileReader->SetUE4Ver(FileSummary.GetFileVersionUE4());
		FileReader->SetEngineVer(FileSummary.EngineVersion);

		// Load the structure
		*FileReader << OutInfo;
	}

	return true;
}
// One-time GL setup for the preview widget: initializes global graphics
// state, builds the preview renderable (shader + texture), attaches an
// animation component, sets up a first-person camera rig, hides/centers
// the mouse cursor, and starts a zero-interval timer that drives redraws.
void Preview::initializeGL()
{
	CommonGraphicsCommands::initializeGlobalGraphics();
	GameObjectManager::globalGameObjectManager.initialize();

	// Compile/link the preview shader pair; errors are collected into err.
	std::string err;
	ShaderInfo* shader = GraphicsShaderManager::globalShaderManager.createShaderInfo(
		FileReader( "assets/shaders/VertexShader.glsl" ).c_str()
		, FileReader( "assets/shaders/FragmentShader.glsl" ).c_str()
		, &err );
	std::cout << err.c_str() << std::endl;

	// Renderable for the previewed model; geometry is assigned later (0 here).
	renderable = GraphicsRenderingManager::globalRenderingManager.addRenderable();
	renderable->geometryInfo = 0;
	renderable->shaderInfo = shader;
	renderable->culling = CT_BACK;
	renderable->depthTestEnabled = true;
	renderable->sharedUniforms = &GraphicsSharedUniformManager::globalSharedUniformManager;
	renderable->initialize( 10 , 1 );

	// Placeholder texture slot; real texture data is supplied elsewhere.
	colorTexture = GraphicsTextureManager::globalTextureManager.addTexture( 0 , 0 , 0 , 0 );
	renderable->addTexture( colorTexture );

	// Game object hosting the renderable and its animation state.
	GameObject* previewModel = GameObjectManager::globalGameObjectManager.addGameObject();
	previewModel->addComponent( renderable );
	animation = new AnimationRenderingInfo;
	previewModel->addComponent( animation );

	// Camera looking diagonally into the scene.
	Camera* camera = GraphicsCameraManager::globalCameraManager.addCamera();
	camera->direction = glm::normalize( glm::vec3( -1 , 0 , -1 ) );
	camera->initializeRenderManagers();
	camera->nearestObject = 0.01f;
	camera->addRenderList( &GraphicsRenderingManager::globalRenderingManager );

	// First-person controls attached to a separate "camera man" object.
	fpsInput = new FirstPersonCameraInput;
	fpsInput->moveSensitivity = 1;
	fpsInput->rotationSensitivity = 0.1f;
	GameObject* cameraMan = GameObjectManager::globalGameObjectManager.addGameObject();
	cameraMan->translate = glm::vec3( 6 , 5 , 15 );
	cameraMan->addComponent( camera );
	cameraMan->addComponent( fpsInput );

	// Center and hide the cursor so mouse-look deltas start from the middle.
	MouseInput::globalMouseInput.updateOldMousePosition = false;
	setMouseTracking( true );
	QCursor c = cursor();
	c.setPos( mapToGlobal( QPoint( width() / 2 , height() / 2 ) ) );
	c.setShape( Qt::BlankCursor );
	setCursor( c );
	MouseInput::globalMouseInput.updateMousePosition( glm::vec2( width() / 2 , height() / 2 ) );

	// Zero-interval timer: schedules update() as fast as the event loop allows.
	timer = new QTimer();
	connect( timer , SIGNAL( timeout() ) , this , SLOT( update() ) );
	timer->start( 0 );
}
// Returns a stable per-machine identifier: the first 32 characters of the
// system machine-id file, or a fixed fallback string when neither the
// systemd nor the dbus machine-id file exists.
string Platform::UniqueClientId() const
{
  // Prefer /etc/machine-id when present, otherwise use the dbus location.
  string machineFile = "/var/lib/dbus/machine-id";
  if (IsFileExistsByFullPath("/etc/machine-id"))
    machineFile = "/etc/machine-id";

  if (!IsFileExistsByFullPath(machineFile))
    return "n0dbus0n0lsb00000000000000000000";

  string content;
  FileReader(machineFile).ReadAsString(content);
  // Keep only the 32-character id (drops e.g. a trailing newline).
  return content.substr(0, 32);
}
void * client(void * sock) { int socket = (long) sock; // create a buffer to keep data from request char * buffer = (char *) malloc(BUFFER_SIZE); if (socket < 0) { exit(1); } // Receive data from client recv(socket, buffer, BUFFER_SIZE, 0); // extracting a method and a path from the first line char * method = strtok(buffer, " "); char * path = strtok(NULL, " "); printf("Socket: [%s] %s\n", method, path); char root[256] = ROOT; char url[256] = ""; strcat(url, root); strcat(url, path); int fd; if (is_file_executable(url)){ fd = cgi(url, NULL); } else { fd = open(url, O_RDONLY); } if (fd != -1){ FileReader(fd, &socket); } else { Http404Error(path, &socket); } // close the connection close(socket); }
// Loads a configuration file and registers every "name:value" line via
// setConfig(). Lines without a colon are ignored.
//
// Fix: the previous version kept scanning past the first ':', so a line
// containing several colons (e.g. "url:http://host") registered the
// setting once per colon with progressively shifted name/value splits.
// Only the FIRST colon now separates name from value, letting values
// themselves contain colons.
void Config::loadConfig(std::string a_filePath) {
    FileReader fr = FileReader();
    File configFile = fr.loadFile(a_filePath);

    for (int i = 0; i < configFile.getLineCount(); ++i) {
        const std::string thisLine = configFile.getLine(i);

        // Split at the first ':' only.
        const std::string::size_type sep = thisLine.find(':');
        if (sep != std::string::npos) {
            setConfig(thisLine.substr(0, sep), thisLine.substr(sep + 1));
        }
    }
}
// Parses the given file into a list of reader sections.
// NOTE(review): currently stubbed out — always returns an empty vector.
// The original lisp/S-expression implementation is preserved below under
// "#if 0"; note it returns std::vector<FileReader> while this function is
// declared to return std::vector<ReaderObject>, so it needs porting
// before it can be re-enabled.
std::vector<ReaderObject> FileReader::parse_many(const Pathname& pathname)
{
  return {};
#if 0
  std::shared_ptr<lisp::Lisp> sexpr = lisp::Parser::parse(pathname.get_sys_path());
  if (sexpr)
  {
    std::vector<FileReader> sections;
    for(size_t i = 0; i < sexpr->get_list_size(); ++i)
    {
      sections.push_back(FileReader(std::make_shared<SExprFileReaderImpl>(sexpr->get_list_elem(i))));
    }
    return sections;
  }
  else
  {
    return std::vector<FileReader>();
  }
#endif
}
/**
 * Loads a raw stats capture (path given via -INFILE=) and dumps per-frame
 * packet statistics for memory profiling.
 *
 * Flow: validate and open the file, read header + metadata, then read every
 * compressed stat packet sequentially, merging packets that belong to the
 * same thread within the same frame, log per-frame and total sizes, and
 * finally hand the combined history to ProcessMemoryOperations().
 */
void FStatsMemoryDumpCommand::InternalRun()
{
	FParse::Value( FCommandLine::Get(), TEXT( "-INFILE=" ), SourceFilepath );

	// Reject files too small to even hold a magic value.
	const int64 Size = IFileManager::Get().FileSize( *SourceFilepath );
	if( Size < 4 )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open: %s" ), *SourceFilepath );
		return;
	}
	TAutoPtr<FArchive> FileReader( IFileManager::Get().CreateFileReader( *SourceFilepath ) );
	if( !FileReader )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open: %s" ), *SourceFilepath );
		return;
	}
	if( !Stream.ReadHeader( *FileReader ) )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open, bad magic: %s" ), *SourceFilepath );
		return;
	}

	UE_LOG( LogStats, Warning, TEXT( "Reading a raw stats file for memory profiling: %s" ), *SourceFilepath );

	// Only finalized, version-5 raw captures are supported.
	const bool bIsFinalized = Stream.Header.IsFinalized();
	check( bIsFinalized );
	check( Stream.Header.Version == EStatMagicWithHeader::VERSION_5 );
	StatsThreadStats.MarkAsLoaded();

	TArray<FStatMessage> Messages; // NOTE(review): declared but never used in this function.
	if( Stream.Header.bRawStatsFile )
	{
		FScopeLogTime SLT( TEXT( "FStatsMemoryDumpCommand::InternalRun" ), nullptr, FScopeLogTime::ScopeLog_Seconds );

		// Read metadata.
		TArray<FStatMessage> MetadataMessages;
		Stream.ReadFNamesAndMetadataMessages( *FileReader, MetadataMessages );
		StatsThreadStats.ProcessMetaDataOnly( MetadataMessages );

		// Find all UObject metadata messages.
		for( const auto& Meta : MetadataMessages )
		{
			FName LongName = Meta.NameAndInfo.GetRawName();
			const FString Desc = FStatNameAndInfo::GetShortNameFrom( LongName ).GetPlainNameString();
			// A "//" in the short name marks an UObject-path style description.
			const bool bContainsUObject = Desc.Contains( TEXT( "//" ) );
			if( bContainsUObject )
			{
				UObjectNames.Add( LongName );
			}
		}

		const int64 CurrentFilePos = FileReader->Tell(); // NOTE(review): captured but unused.

		// Update profiler's metadata.
		CreateThreadsMapping();

		// Read frames offsets.
		Stream.ReadFramesOffsets( *FileReader );

		// Buffer used to store the compressed and decompressed data.
		TArray<uint8> SrcArray;
		TArray<uint8> DestArray;
		const bool bHasCompressedData = Stream.Header.HasCompressedData();
		check( bHasCompressedData );

		// Frame number -> packets of that frame, combined per thread.
		TMap<int64, FStatPacketArray> CombinedHistory;
		int64 TotalDataSize = 0;
		int64 TotalStatMessagesNum = 0;
		int64 MaximumPacketSize = 0;
		int64 TotalPacketsNum = 0;
		// Read all packets sequentially, force by the memory profiler which is now a part of the raw stats.
		// !!CAUTION!! Frame number in the raw stats is pointless, because it is time based, not frame based.
		// Background threads usually execute time consuming operations, so the frame number won't be valid.
		// Needs to be combined by the thread and the time, not by the frame number.
		{
			// Display log information once per 5 seconds to avoid spamming.
			double PreviousSeconds = FPlatformTime::Seconds();
			const int64 FrameOffset0 = Stream.FramesInfo[0].FrameFileOffset;
			FileReader->Seek( FrameOffset0 );

			const int64 FileSize = FileReader->TotalSize();
			while( FileReader->Tell() < FileSize )
			{
				// Read the compressed data.
				FCompressedStatsData UncompressedData( SrcArray, DestArray );
				*FileReader << UncompressedData;
				if( UncompressedData.HasReachedEndOfCompressedData() )
				{
					break;
				}

				// Decode one stat packet from the decompressed bytes.
				FMemoryReader MemoryReader( DestArray, true );
				FStatPacket* StatPacket = new FStatPacket();
				Stream.ReadStatPacket( MemoryReader, *StatPacket );

				const int64 StatPacketFrameNum = StatPacket->Frame;
				FStatPacketArray& Frame = CombinedHistory.FindOrAdd( StatPacketFrameNum );

				// Check if we need to combine packets from the same thread.
				FStatPacket** CombinedPacket = Frame.Packets.FindByPredicate( [&]( FStatPacket* Item ) -> bool
				{
					return Item->ThreadId == StatPacket->ThreadId;
				} );

				const int64 PacketSize = StatPacket->StatMessages.GetAllocatedSize();
				TotalStatMessagesNum += StatPacket->StatMessages.Num();

				if( CombinedPacket )
				{
					// Merge into the existing packet; adjust the tracked size around the append.
					TotalDataSize -= (*CombinedPacket)->StatMessages.GetAllocatedSize();
					(*CombinedPacket)->StatMessages += StatPacket->StatMessages;
					TotalDataSize += (*CombinedPacket)->StatMessages.GetAllocatedSize();
					delete StatPacket;
				}
				else
				{
					Frame.Packets.Add( StatPacket );
					TotalDataSize += PacketSize;
				}

				const double CurrentSeconds = FPlatformTime::Seconds();
				if( CurrentSeconds > PreviousSeconds + NumSecondsBetweenLogs )
				{
					const int32 PctPos = int32( 100.0*FileReader->Tell() / FileSize );
					// NOTE(review): "%10llu" is paired with a signed int64 and "%4i"
					// with an int64 — confirm these specifiers on all target platforms.
					UE_LOG( LogStats, Log, TEXT( "%3i%% %10llu (%.1f MB) read messages, last read frame %4i" ), PctPos, TotalStatMessagesNum, TotalDataSize / 1024.0f / 1024.0f, StatPacketFrameNum );
					PreviousSeconds = CurrentSeconds;
				}

				MaximumPacketSize = FMath::Max( MaximumPacketSize, PacketSize );
				TotalPacketsNum++;
			}
		}

		// Dump frame stats
		for( const auto& It : CombinedHistory )
		{
			const int64 FrameNum = It.Key;
			int64 FramePacketsSize = 0;
			int64 FrameStatMessages = 0;
			int64 FramePackets = It.Value.Packets.Num();
			// Threads
			for( const auto& It2 : It.Value.Packets )
			{
				FramePacketsSize += It2->StatMessages.GetAllocatedSize();
				FrameStatMessages += It2->StatMessages.Num();
			}

			UE_LOG( LogStats, Warning, TEXT( "Frame: %10llu/%3lli Size: %.1f MB / %10lli" ), FrameNum, FramePackets, FramePacketsSize / 1024.0f / 1024.0f, FrameStatMessages );
		}

		// NOTE(review): "%1f" below is almost certainly intended to be "%.1f".
		UE_LOG( LogStats, Warning, TEXT( "TotalPacketSize: %.1f MB, Max: %1f MB" ), TotalDataSize / 1024.0f / 1024.0f, MaximumPacketSize / 1024.0f / 1024.0f );

		TArray<int64> Frames;
		CombinedHistory.GenerateKeyArray( Frames );
		Frames.Sort();
		// NOTE(review): MiddleFrame is computed but never used in this function.
		const int64 MiddleFrame = Frames[Frames.Num() / 2];

		ProcessMemoryOperations( CombinedHistory );
	}
}
int main() { string line; cout << "Find anagrams for word: "; getline(cin, line); if (line.length() == 1) { FileReader("words_1.txt", line); } else if (line.length() == 2) { FileReader("words_2.txt", line); } else if (line.length() == 3) { FileReader("words_3.txt", line); } else if (line.length() == 4) { FileReader("words_4.txt", line); } else if (line.length() == 5) { FileReader("words_5.txt", line); } else if (line.length() == 6) { FileReader("words_6.txt", line); } else if (line.length() == 7) { FileReader("words_7.txt", line); } else if (line.length() == 8) { FileReader("words_8.txt", line); } else if (line.length() == 9) { FileReader("words_9.txt", line); } else if (line.length() == 10) { FileReader("words_10.txt", line); } else if (line.length() == 11) { FileReader("words_11.txt", line); } else if (line.length() == 12) { FileReader("words_12.txt", line); } else if (line.length() == 13) { FileReader("words_13.txt", line); } else if (line.length() == 14) { FileReader("words_14.txt", line); } else if (line.length() == 15) { FileReader("words_15.txt", line); } else if (line.length() == 16) { FileReader("words_16.txt", line); } else if (line.length() == 17) { FileReader("words_17.txt", line); } else if (line.length() == 18) { FileReader("words_18.txt", line); } else if (line.length() == 19) { FileReader("words_19.txt", line); } else if (line.length() == 20) { FileReader("words_20.txt", line); } else if (line.length() == 21) { FileReader("words_21.txt", line); } else if (line.length() == 22) { FileReader("words_22.txt", line); } else if (line.length() == 23) { FileReader("words_23.txt", line); } else if (line.length() == 24) { FileReader("words_24.txt", line); } else if (line.length() == 25) { FileReader("words_25.txt", line); } else if (line.length() == 26) { FileReader("words_26.txt", line); } }
/**
 * Reacts to property edits on this image input.
 *
 * - When the Consumers array changed (e.g. a graph instance was deleted),
 *   prunes null entries.
 * - When the source is a TGA file, reloads and decodes the source image
 *   and rebuilds the RGB or alpha bulk data with the newly selected JPEG
 *   compression level.
 * - Finally pushes the image input to every consumer and re-renders them
 *   asynchronously.
 */
void USubstanceImageInput::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent)
{
	Super::PostEditChangeProperty(PropertyChangedEvent);

	// change in the consumers array can come from the deletion of a graph instance
	// in this case we need to remove invalid entries
	if (PropertyChangedEvent.MemberProperty && PropertyChangedEvent.MemberProperty->GetNameCPP() == TEXT("Consumers"))
	{
		for (auto itConsumer = Consumers.CreateIterator(); itConsumer; ++itConsumer)
		{
			if ((*itConsumer) == 0)
			{
				// Removal shifts indices; Reset() restarts the scan from the beginning.
				Consumers.RemoveAt(itConsumer.GetIndex());
				itConsumer.Reset();
			}
		}
	}

	if (PropertyChangedEvent.MemberProperty && SourceFilePath.EndsWith(".tga"))
	{
		// start by reloading the image input
		TUniquePtr<FArchive> FileReader(IFileManager::Get().CreateFileReader(*SourceFilePath));

		// which could be missing
		if (!FileReader)
		{
			return;
		}

		// Read the whole TGA file into memory and decode it to raw RGBA.
		uint32 BufferSize = FileReader->TotalSize();
		void* Buffer = FMemory::Malloc(BufferSize);
		FileReader->Serialize(Buffer, BufferSize);

		const FTGAFileHeader* TGA = (FTGAFileHeader *)Buffer;
		SizeX = TGA->Width;
		SizeY = TGA->Height;
		int32 TextureDataSizeRGBA = SizeX * SizeY * 4; // 4 bytes per pixel (RGBA8)
		uint32* DecompressedImageRGBA = (uint32*)FMemory::Malloc(TextureDataSizeRGBA);
		DecompressTGA_helper(TGA, DecompressedImageRGBA, TextureDataSizeRGBA, NULL);

		// The raw file buffer is no longer needed once decoded.
		FMemory::Free(Buffer);
		Buffer = NULL;

		// if the user modified the color channel compression level
		if (PropertyChangedEvent.MemberProperty->GetNameCPP() == TEXT("CompressionRGB") )
		{
			// replace the eventually compressed color channel by the file's content
			ImageRGB.RemoveBulkData();
			ImageRGB.Lock(LOCK_READ_WRITE);
			void* ImagePtr = ImageRGB.Realloc(TextureDataSizeRGBA);
			FMemory::Memcpy(ImagePtr, DecompressedImageRGBA, TextureDataSizeRGBA);

			// perform compression if not 0
			if (CompressionRGB != 0 && ImageRGB.GetBulkDataSize())
			{
				TArray<uint8> CompressedImageRGB;
				// for retro compatibility, 1 = default compression quality, otherwise, its the desired compression level
				int32 Quality = FMath::Clamp(CompressionRGB == 1 ? 85 : CompressionRGB, 0, 100);
				Substance::Helpers::CompressJpeg(
					ImagePtr,
					ImageRGB.GetBulkDataSize(),
					SizeX,
					SizeY,
					4,
					CompressedImageRGB,
					Quality);
				// Shrink the bulk data to the compressed size and store the JPEG bytes.
				ImagePtr = ImageRGB.Realloc(CompressedImageRGB.Num());
				FMemory::Memcpy(ImagePtr, CompressedImageRGB.GetData(), CompressedImageRGB.Num());
			}
			ImageRGB.Unlock();
		}
		else if (PropertyChangedEvent.MemberProperty->GetNameCPP() == TEXT("CompressionAlpha") && ImageA.GetBulkDataSize())
		{
			int32 TextureDataSizeA = SizeX * SizeY; // 1 byte per pixel (alpha only)
			// NOTE(review): allocated as a byte count but typed uint32* — it is
			// only ever used through (uint8*) casts below; confirm intent.
			uint32* DecompressedImageA = (uint32*)FMemory::Malloc(TextureDataSizeA);
			// Extract the alpha plane from the decoded RGBA image.
			Substance::Helpers::Split_RGBA_8bpp(
				SizeX, SizeY,
				(uint8*)DecompressedImageRGBA, TextureDataSizeRGBA,
				(uint8*)DecompressedImageA, TextureDataSizeA);

			ImageA.RemoveBulkData();
			ImageA.Lock(LOCK_READ_WRITE);
			void* AlphaPtr = ImageA.Realloc(TextureDataSizeA);
			FMemory::Memcpy(AlphaPtr, DecompressedImageA, TextureDataSizeA);

			FMemory::Free(DecompressedImageA);
			DecompressedImageA = NULL;

			if (CompressionAlpha != 0)
			{
				TArray<uint8> CompressedImageA;
				// for retro compatibility, 1 = default compression quality, otherwise, its the desired compression level
				int32 Quality = FMath::Clamp(CompressionAlpha == 1 ? 85 : CompressionAlpha, 0, 100);
				Substance::Helpers::CompressJpeg(
					AlphaPtr,
					ImageA.GetBulkDataSize(),
					SizeX,
					SizeY,
					1,
					CompressedImageA,
					Quality);
				// Shrink the bulk data to the compressed size and store the JPEG bytes.
				AlphaPtr = ImageA.Realloc(CompressedImageA.Num());
				FMemory::Memcpy(AlphaPtr, CompressedImageA.GetData(), CompressedImageA.Num());
			}
			ImageA.Unlock();
		}

		FMemory::Free(DecompressedImageRGBA);
		DecompressedImageRGBA = NULL;
	}
	else
	{
		// Non-TGA sources: reset the color compression level to its default.
		CompressionRGB = 1;
	}

	// update eventual Substance using this image input
	for (auto itConsumer = Consumers.CreateIterator(); itConsumer; ++itConsumer)
	{
		(*itConsumer)->Instance->UpdateInput(
			0,
			this);
		Substance::Helpers::RenderAsync((*itConsumer)->Instance);
	}
}
/**
 * Loads a raw stats capture into profiler frames.
 *
 * Flow: validate and open the .uestats raw file, read header + metadata,
 * read every compressed stat packet (combining packets from the same
 * thread within a frame), drop frames that carry no game-thread time,
 * build FProfilerFrame entries with per-thread times, then process the
 * raw stat packets into the profiler stream and adjust cycle counters.
 */
void FRawProfilerSession::PrepareLoading()
{
	SCOPE_LOG_TIME_FUNC();

	const FString Filepath = DataFilepath + FStatConstants::StatsFileRawExtension;

	// Reject files too small to even hold a magic value.
	const int64 Size = IFileManager::Get().FileSize( *Filepath );
	if( Size < 4 )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open: %s" ), *Filepath );
		return;
	}
	TAutoPtr<FArchive> FileReader( IFileManager::Get().CreateFileReader( *Filepath ) );
	if( !FileReader )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open: %s" ), *Filepath );
		return;
	}
	if( !Stream.ReadHeader( *FileReader ) )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open, bad magic: %s" ), *Filepath );
		return;
	}

	// Only finalized, version-5 raw captures are supported.
	const bool bIsFinalized = Stream.Header.IsFinalized();
	check( bIsFinalized );
	check( Stream.Header.Version == EStatMagicWithHeader::VERSION_5 );
	StatsThreadStats.MarkAsLoaded();

	TArray<FStatMessage> Messages; // NOTE(review): declared but never used in this function.
	if( Stream.Header.bRawStatsFile )
	{
		// Read metadata.
		TArray<FStatMessage> MetadataMessages;
		Stream.ReadFNamesAndMetadataMessages( *FileReader, MetadataMessages );
		StatsThreadStats.ProcessMetaDataOnly( MetadataMessages );

		// NOTE(review): these three FName lookups appear unused — possibly kept
		// as debugging aids to resolve specific name-table entries; confirm.
		const FName F00245 = FName(245, 245, 0);
		const FName F11602 = FName(11602, 11602, 0);
		const FName F06394 = FName(6394, 6394, 0);

		const int64 CurrentFilePos = FileReader->Tell(); // NOTE(review): captured but unused.

		// Update profiler's metadata.
		StatMetaData->UpdateFromStatsState( StatsThreadStats );
		const uint32 GameThreadID = GetMetaData()->GetGameThreadID();

		// Read frames offsets.
		Stream.ReadFramesOffsets( *FileReader );

		// Buffer used to store the compressed and decompressed data.
		TArray<uint8> SrcArray;
		TArray<uint8> DestArray;
		const bool bHasCompressedData = Stream.Header.HasCompressedData();
		check(bHasCompressedData);

		// Frame number -> packets of that frame, combined per thread.
		TMap<int64, FStatPacketArray> CombinedHistory;
		int64 TotalPacketSize = 0;
		int64 MaximumPacketSize = 0;
		// Read all packets sequentially, force by the memory profiler which is now a part of the raw stats.
		// !!CAUTION!! Frame number in the raw stats is pointless, because it is time based, not frame based.
		// Background threads usually execute time consuming operations, so the frame number won't be valid.
		// Needs to be combined by the thread and the time, not by the frame number.
		{
			int64 FrameOffset0 = Stream.FramesInfo[0].FrameFileOffset;
			FileReader->Seek( FrameOffset0 );

			const int64 FileSize = FileReader->TotalSize();

			while( FileReader->Tell() < FileSize )
			{
				// Read the compressed data.
				FCompressedStatsData UncompressedData( SrcArray, DestArray );
				*FileReader << UncompressedData;
				if( UncompressedData.HasReachedEndOfCompressedData() )
				{
					break;
				}

				// Decode one stat packet from the decompressed bytes.
				FMemoryReader MemoryReader( DestArray, true );
				FStatPacket* StatPacket = new FStatPacket();
				Stream.ReadStatPacket( MemoryReader, *StatPacket );

				const int64 FrameNum = StatPacket->Frame;
				FStatPacketArray& Frame = CombinedHistory.FindOrAdd(FrameNum);

				// Check if we need to combine packets from the same thread.
				FStatPacket** CombinedPacket = Frame.Packets.FindByPredicate([&](FStatPacket* Item) -> bool
				{
					return Item->ThreadId == StatPacket->ThreadId;
				});
				if( CombinedPacket )
				{
					// NOTE(review): unlike the memory-dump variant of this loop,
					// StatPacket is not deleted after merging — likely a leak; confirm.
					(*CombinedPacket)->StatMessages += StatPacket->StatMessages;
				}
				else
				{
					Frame.Packets.Add(StatPacket);
				}

				const int64 CurrentPos = FileReader->Tell();
				const int32 PctPos = int32(100.0f*CurrentPos/FileSize);

				UE_LOG( LogStats, Log, TEXT( "%3i Processing FStatPacket: Frame %5i for thread %5i with %6i messages (%.1f MB)" ),
					PctPos,
					StatPacket->Frame,
					StatPacket->ThreadId,
					StatPacket->StatMessages.Num(),
					StatPacket->StatMessages.GetAllocatedSize()/1024.0f/1024.0f );

				const int64 PacketSize = StatPacket->StatMessages.GetAllocatedSize();
				TotalPacketSize += PacketSize;
				MaximumPacketSize = FMath::Max( MaximumPacketSize, PacketSize );
			}
		}

		// NOTE(review): "%1f" below is almost certainly intended to be "%.1f".
		UE_LOG( LogStats, Log, TEXT( "TotalPacketSize: %.1f MB, Max: %1f MB" ), TotalPacketSize/1024.0f/1024.0f, MaximumPacketSize/1024.0f/1024.0f );

		TArray<int64> Frames;
		CombinedHistory.GenerateKeyArray(Frames);
		Frames.Sort();
		// NOTE(review): MiddleFrame is chosen BEFORE empty game-thread frames are
		// removed below; if that frame gets removed, FindChecked(MiddleFrame)
		// later will assert — confirm this cannot happen in practice.
		const int64 MiddleFrame = Frames[Frames.Num()/2];

		// Remove all frames without the game thread messages.
		for (int32 FrameIndex = 0; FrameIndex < Frames.Num(); ++FrameIndex)
		{
			const int64 TargetFrame = Frames[FrameIndex];
			const FStatPacketArray& Frame = CombinedHistory.FindChecked( TargetFrame );

			const double GameThreadTimeMS = GetMetaData()->ConvertCyclesToMS( GetFastThreadFrameTimeInternal( Frame, EThreadType::Game ) );

			if (GameThreadTimeMS == 0.0f)
			{
				CombinedHistory.Remove( TargetFrame );
				Frames.RemoveAt( FrameIndex );
				FrameIndex--; // compensate for the removal so no frame is skipped
			}
		}

		// Derive the cycle duration from a representative (middle) frame.
		StatMetaData->SecondsPerCycle = GetSecondsPerCycle( CombinedHistory.FindChecked(MiddleFrame) );
		check( StatMetaData->GetSecondsPerCycle() > 0.0 );

		//const int32 FirstGameThreadFrame = FindFirstFrameWithGameThread( CombinedHistory, Frames );

		// Prepare profiler frame.
		{
			SCOPE_LOG_TIME( TEXT( "Preparing profiler frames" ), nullptr );

			// Prepare profiler frames.
			double ElapsedTimeMS = 0;
			for( int32 FrameIndex = 0; FrameIndex < Frames.Num(); ++FrameIndex )
			{
				const int64 TargetFrame = Frames[FrameIndex];
				const FStatPacketArray& Frame = CombinedHistory.FindChecked(TargetFrame);

				const double GameThreadTimeMS = GetMetaData()->ConvertCyclesToMS( GetFastThreadFrameTimeInternal(Frame,EThreadType::Game) );

				if( GameThreadTimeMS == 0.0f )
				{
					continue;
				}

				const double RenderThreadTimeMS = GetMetaData()->ConvertCyclesToMS( GetFastThreadFrameTimeInternal(Frame,EThreadType::Renderer) );

				// Update mini-view, convert from cycles to ms.
				TMap<uint32, float> ThreadTimesMS;
				ThreadTimesMS.Add( GameThreadID, GameThreadTimeMS );
				ThreadTimesMS.Add( GetMetaData()->GetRenderThreadID()[0], RenderThreadTimeMS );

				// Pass the reference to the stats' metadata.
				OnAddThreadTime.ExecuteIfBound( FrameIndex, ThreadTimesMS, StatMetaData );

				// Create a new profiler frame and add it to the stream.
				ElapsedTimeMS += GameThreadTimeMS;
				FProfilerFrame* ProfilerFrame = new FProfilerFrame( TargetFrame, GameThreadTimeMS, ElapsedTimeMS );
				ProfilerFrame->ThreadTimesMS = ThreadTimesMS;
				ProfilerStream.AddProfilerFrame( TargetFrame, ProfilerFrame );
			}
		}

		// Process the raw stats data.
		{
			SCOPE_LOG_TIME( TEXT( "Processing the raw stats" ), nullptr );

			double CycleCounterAdjustmentMS = 0.0f;

			// Read the raw stats messages.
			for( int32 FrameIndex = 0; FrameIndex < Frames.Num()-1; ++FrameIndex )
			{
				const int64 TargetFrame = Frames[FrameIndex];
				const FStatPacketArray& Frame = CombinedHistory.FindChecked(TargetFrame);

				FProfilerFrame* ProfilerFrame = ProfilerStream.GetProfilerFrame( FrameIndex );

				UE_CLOG( FrameIndex % 8 == 0, LogStats, Log, TEXT( "Processing raw stats frame: %4i/%4i" ), FrameIndex, Frames.Num() );

				ProcessStatPacketArray( Frame, *ProfilerFrame, FrameIndex ); // or ProfilerFrame->TargetFrame

				// Find the first cycle counter for the game thread.
				if( CycleCounterAdjustmentMS == 0.0f )
				{
					CycleCounterAdjustmentMS = ProfilerFrame->Root->CycleCounterStartTimeMS;
				}

				// Update thread time and mark profiler frame as valid and ready for use.
				ProfilerFrame->MarkAsValid();
			}

			// Adjust all profiler frames.
			ProfilerStream.AdjustCycleCounters( CycleCounterAdjustmentMS );
		}
	}

	const int64 AllocatedSize = ProfilerStream.GetAllocatedSize(); // NOTE(review): computed but unused.

	// We have the whole metadata and basic information about the raw stats file, start ticking the profiler session.
	//OnTickHandle = FTicker::GetCoreTicker().AddTicker( OnTick, 0.25f );

#if 0
	if( SessionType == EProfilerSessionTypes::OfflineRaw )
	{
		// Broadcast that a capture file has been fully processed.
		OnCaptureFileProcessed.ExecuteIfBound( GetInstanceID() );
	}
#endif // 0
}
int32 UGatherTextFromAssetsCommandlet::Main(const FString& Params) { // Parse command line. TArray<FString> Tokens; TArray<FString> Switches; TMap<FString, FString> ParamVals; UCommandlet::ParseCommandLine(*Params, Tokens, Switches, ParamVals); //Set config file const FString* ParamVal = ParamVals.Find(FString(TEXT("Config"))); FString GatherTextConfigPath; if ( ParamVal ) { GatherTextConfigPath = *ParamVal; } else { UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("No config specified.")); return -1; } //Set config section ParamVal = ParamVals.Find(FString(TEXT("Section"))); FString SectionName; if ( ParamVal ) { SectionName = *ParamVal; } else { UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("No config section specified.")); return -1; } //Include paths TArray<FString> IncludePaths; GetConfigArray(*SectionName, TEXT("IncludePaths"), IncludePaths, GatherTextConfigPath); if (IncludePaths.Num() == 0) { UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("No include paths in section %s"), *SectionName); return -1; } //Exclude paths TArray<FString> ExcludePaths; GetConfigArray(*SectionName, TEXT("ExcludePaths"), ExcludePaths, GatherTextConfigPath); //package extensions TArray<FString> PackageExts; GetConfigArray(*SectionName, TEXT("PackageExtensions"), PackageExts, GatherTextConfigPath); if (PackageExts.Num() == 0) { UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("No package extensions specified in section %s, using defaults"), *SectionName); PackageExts.Add(FString("*") + FPackageName::GetAssetPackageExtension()); PackageExts.Add(FString("*") + FPackageName::GetMapPackageExtension()); } //asset class exclude TArray<FString> ExcludeClasses; GetConfigArray(*SectionName, TEXT("ExcludeClasses"), ExcludeClasses, GatherTextConfigPath); FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>(TEXT("AssetRegistry")); AssetRegistryModule.Get().SearchAllAssets( true ); FARFilter Filter; for(int32 i = 0; i < 
ExcludeClasses.Num(); i++) { UClass* FilterClass = FindObject<UClass>(ANY_PACKAGE, *ExcludeClasses[i]); if(FilterClass) { Filter.ClassNames.Add( FilterClass->GetFName() ); } else { UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("Invalid exclude class %s"), *ExcludeClasses[i]); } } TArray<FAssetData> AssetData; AssetRegistryModule.Get().GetAssets(Filter, AssetData); FString UAssetPackageExtension = FPackageName::GetAssetPackageExtension(); TSet< FString > LongPackageNamesToExclude; for (int Index = 0; Index < AssetData.Num(); Index++) { LongPackageNamesToExclude.Add( FPackageName::LongPackageNameToFilename( AssetData[Index].PackageName.ToString(), UAssetPackageExtension ) ); } //Get whether we should fix broken properties that we find. GetConfigBool(*SectionName, TEXT("bFixBroken"), bFixBroken, GatherTextConfigPath); // Add any manifest dependencies if they were provided TArray<FString> ManifestDependenciesList; GetConfigArray(*SectionName, TEXT("ManifestDependencies"), ManifestDependenciesList, GatherTextConfigPath); if( !ManifestInfo->AddManifestDependencies( ManifestDependenciesList ) ) { UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("The GatherTextFromAssets commandlet couldn't find all the specified manifest dependencies.")); return -1; } //The main array of files to work from. TArray< FString > PackageFileNamesToLoad; TSet< FString > LongPackageNamesToProcess; TArray<FString> PackageFilesNotInIncludePath; TArray<FString> PackageFilesInExcludePath; TArray<FString> PackageFilesExcludedByClass; //Fill the list of packages to work from. 
uint8 PackageFilter = NORMALIZE_DefaultFlags; TArray<FString> Unused; for ( int32 PackageFilenameWildcardIdx = 0; PackageFilenameWildcardIdx < PackageExts.Num(); PackageFilenameWildcardIdx++ ) { const bool IsAssetPackage = PackageExts[PackageFilenameWildcardIdx] == ( FString( TEXT("*") )+ FPackageName::GetAssetPackageExtension() ); TArray<FString> PackageFiles; if ( !NormalizePackageNames( Unused, PackageFiles, PackageExts[PackageFilenameWildcardIdx], PackageFilter) ) { UE_LOG(LogGatherTextFromAssetsCommandlet, Display, TEXT("No packages found with extension %i: '%s'"), PackageFilenameWildcardIdx, *PackageExts[PackageFilenameWildcardIdx]); continue; } else { UE_LOG(LogGatherTextFromAssetsCommandlet, Display, TEXT("Found %i packages with extension %i: '%s'"), PackageFiles.Num(), PackageFilenameWildcardIdx, *PackageExts[PackageFilenameWildcardIdx]); } //Run through all the files found and add any that pass the include, exclude and filter constraints to OrderedPackageFilesToLoad for( int32 PackageFileIdx=0; PackageFileIdx<PackageFiles.Num(); ++PackageFileIdx ) { bool bExclude = false; //Ensure it matches the include paths if there are some. for( int32 IncludePathIdx=0; IncludePathIdx<IncludePaths.Num() ; ++IncludePathIdx ) { bExclude = true; if( PackageFiles[PackageFileIdx].MatchesWildcard(IncludePaths[IncludePathIdx]) ) { bExclude = false; break; } } if ( bExclude ) { PackageFilesNotInIncludePath.Add(PackageFiles[PackageFileIdx]); } //Ensure it does not match the exclude paths if there are some. for( int32 ExcludePathIdx=0; !bExclude && ExcludePathIdx<ExcludePaths.Num() ; ++ExcludePathIdx ) { if( PackageFiles[PackageFileIdx].MatchesWildcard(ExcludePaths[ExcludePathIdx]) ) { bExclude = true; PackageFilesInExcludePath.Add(PackageFiles[PackageFileIdx]); break; } } //Check that this is not on the list of packages that we don't care about e.g. textures. 
if ( !bExclude && IsAssetPackage && LongPackageNamesToExclude.Contains( PackageFiles[PackageFileIdx] ) ) { bExclude = true; PackageFilesExcludedByClass.Add(PackageFiles[PackageFileIdx]); } //If we haven't failed one of the above checks, add it to the array of packages to process. if(!bExclude) { TScopedPointer< FArchive > FileReader( IFileManager::Get().CreateFileReader( *PackageFiles[PackageFileIdx] ) ); if( FileReader ) { // Read package file summary from the file FPackageFileSummary PackageSummary; (*FileReader) << PackageSummary; // Early out check if the package has been flagged as needing localization gathering if( PackageSummary.PackageFlags & PKG_RequiresLocalizationGather || PackageSummary.GetFileVersionUE4() < VER_UE4_PACKAGE_REQUIRES_LOCALIZATION_GATHER_FLAGGING ) { PackageFileNamesToLoad.Add( PackageFiles[PackageFileIdx] ); } } } } } if ( PackageFileNamesToLoad.Num() == 0 ) { UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("No files found. Or none passed the include/exclude criteria.")); } CollectGarbage( RF_Native ); //Now go through the remaining packages in the main array and process them in batches. int32 PackagesPerBatchCount = 100; TArray< UPackage* > LoadedPackages; TArray< FString > LoadedPackageFileNames; TArray< FString > FailedPackageFileNames; TArray< UPackage* > PackagesToProcess; const int32 PackageCount = PackageFileNamesToLoad.Num(); const int32 BatchCount = PackageCount / PackagesPerBatchCount + (PackageCount % PackagesPerBatchCount > 0 ? 
1 : 0); // Add an extra batch for any remainder if necessary if(PackageCount > 0) { UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("Loading %i packages in %i batches of %i."), PackageCount, BatchCount, PackagesPerBatchCount); } //Load the packages in batches int32 PackageIndex = 0; for( int32 BatchIndex = 0; BatchIndex < BatchCount; ++BatchIndex ) { int32 PackagesInThisBatch = 0; for( PackageIndex; PackageIndex < PackageCount && PackagesInThisBatch < PackagesPerBatchCount; ++PackageIndex ) { FString PackageFileName = PackageFileNamesToLoad[PackageIndex]; UPackage *Package = LoadPackage( NULL, *PackageFileName, LOAD_None ); if( Package ) { LoadedPackages.Add(Package); LoadedPackageFileNames.Add(PackageFileName); // Because packages may not have been resaved after this flagging was implemented, we may have added packages to load that weren't flagged - potential false positives. // The loading process should have reflagged said packages so that only true positives will have this flag. if( Package->RequiresLocalizationGather() ) { PackagesToProcess.Add( Package ); } } else { FailedPackageFileNames.Add( PackageFileName ); continue; } ++PackagesInThisBatch; } UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("Loaded %i packages in batch %i of %i."), PackagesInThisBatch, BatchIndex + 1, BatchCount); ProcessPackages(PackagesToProcess); PackagesToProcess.Empty(PackagesPerBatchCount); if( bFixBroken ) { for( int32 LoadedPackageIndex=0; LoadedPackageIndex < LoadedPackages.Num() ; ++LoadedPackageIndex ) { UPackage *Package = LoadedPackages[LoadedPackageIndex]; const FString PackageName = LoadedPackageFileNames[LoadedPackageIndex]; //Todo - link with source control. if( Package ) { if( Package->IsDirty() ) { if( SavePackageHelper( Package, *PackageName ) ) { UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("Saved Package %s."),*PackageName); } else { //TODO - Work out how to integrate with source control. The code from the source gatherer doesn't work. 
UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("Could not save package %s. Probably due to source control. "),*PackageName); } } } else { UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("Failed to find one of the loaded packages.")); } } } CollectGarbage( RF_Native ); LoadedPackages.Empty(PackagesPerBatchCount); LoadedPackageFileNames.Empty(PackagesPerBatchCount); } for(auto i = ConflictTracker.Namespaces.CreateConstIterator(); i; ++i) { const FString& NamespaceName = i.Key(); const FConflictTracker::FKeyTable& KeyTable = i.Value(); for(auto j = KeyTable.CreateConstIterator(); j; ++j) { const FString& KeyName = j.Key(); const FConflictTracker::FEntryArray& EntryArray = j.Value(); for(int k = 0; k < EntryArray.Num(); ++k) { const FConflictTracker::FEntry& Entry = EntryArray[k]; switch(Entry.Status) { case EAssetTextGatherStatus::MissingKey: { UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("Detected missing key on asset \"%s\"."), *Entry.ObjectPath); } break; case EAssetTextGatherStatus::MissingKey_Resolved: { UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("Fixed missing key on asset \"%s\"."), *Entry.ObjectPath); } break; case EAssetTextGatherStatus::IdentityConflict: { UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("Detected duplicate identity with differing source on asset \"%s\"."), *Entry.ObjectPath); } break; case EAssetTextGatherStatus::IdentityConflict_Resolved: { UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("Fixed duplicate identity with differing source on asset \"%s\"."), *Entry.ObjectPath); } break; } } } } return 0; }
bool FXmlFile::LoadFile(const FString& InFile, EConstructMethod::Type ConstructMethod) { // Remove any file stuff if it already exists Clear(); // So far no error (Early so it can be overwritten below by errors) ErrorMessage = NSLOCTEXT("XmlParser", "LoadSuccess", "XmlFile was loaded successfully").ToString(); TArray<FString> Input; if(ConstructMethod == EConstructMethod::ConstructFromFile) { // Create file reader TUniquePtr<FArchive> FileReader(IFileManager::Get().CreateFileReader(*InFile)); if(!FileReader) { ErrorMessage = NSLOCTEXT("XmlParser", "FileLoadFail", "Failed to load the file").ToString(); ErrorMessage += TEXT("\""); ErrorMessage += InFile; ErrorMessage += TEXT("\""); return false; } // Create buffer for file input uint32 BufferSize = FileReader->TotalSize(); void* Buffer = FMemory::Malloc(BufferSize); FileReader->Serialize(Buffer, BufferSize); // Parse file buffer into an array of lines if (!FindCharSizeAndSplitLines(Input, Buffer, BufferSize)) { ErrorMessage = NSLOCTEXT("XmlParser", "InvalidFormatFail", "Failed to parse the file (Unsupported character encoding)").ToString(); ErrorMessage += TEXT("\""); ErrorMessage += InFile; ErrorMessage += TEXT("\""); return false; } // Release resources FMemory::Free(Buffer); } else { // Parse input buffer into an array of lines SplitLines(Input, *InFile, *InFile + InFile.Len()); } // Pre-process the input PreProcessInput(Input); // Tokenize the input TArray<FString> Tokens = Tokenize(Input); // Parse the input & create the nodes CreateNodes(Tokens); // All done with creation, set up necessary information if(bFileLoaded == true) { if(ConstructMethod == EConstructMethod::ConstructFromFile) { LoadedFile = InFile; } } else { LoadedFile = TEXT(""); RootNode = nullptr; } // Now check the status flag of the creation. It may have actually failed, but given us a // partially created representation if(bCreationFailed) { Clear(); } return bFileLoaded; }
// Constructs a stats-thread state by loading a previously captured stats file
// from disk. Supports two on-disk layouts: "raw" stats files (per-frame packet
// arrays addressed through a frame-offset table) and "condensed" files (a flat
// stream of FStatMessages). Only finalized files are accepted (check below).
// On any open/parse failure it logs an error and leaves the object with its
// default-initialized frame counters.
FStatsThreadState::FStatsThreadState( FString const& Filename )
	: HistoryFrames( MAX_int32 )
	, MaxFrameSeen( -1 )
	, MinFrameSeen( -1 )
	, LastFullFrameMetaAndNonFrame( -1 )
	, LastFullFrameProcessed( -1 )
	, bWasLoaded( true )
	, CurrentGameFrame( -1 )
	, CurrentRenderFrame( -1 )
{
	// A valid file must at least hold the 4-byte magic; reject anything smaller.
	const int64 Size = IFileManager::Get().FileSize( *Filename );
	if( Size < 4 )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open: %s" ), *Filename );
		return;
	}
	TAutoPtr<FArchive> FileReader( IFileManager::Get().CreateFileReader( *Filename ) );
	if( !FileReader )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open: %s" ), *Filename );
		return;
	}
	FStatsReadStream Stream;
	if( !Stream.ReadHeader( *FileReader ) )
	{
		UE_LOG( LogStats, Error, TEXT( "Could not open, bad magic: %s" ), *Filename );
		return;
	}
	// Test version only works for the finalized stats files.
	const bool bIsFinalized = Stream.Header.IsFinalized();
	check( bIsFinalized );
	TArray<FStatMessage> Messages;
	if( Stream.Header.bRawStatsFile )
	{
		// NOTE(review): CurrentFilePos is captured but not used afterwards —
		// presumably a leftover; confirm before removing.
		const int64 CurrentFilePos = FileReader->Tell();

		// Read metadata.
		TArray<FStatMessage> MetadataMessages;
		Stream.ReadFNamesAndMetadataMessages( *FileReader, MetadataMessages );
		ProcessMetaDataForLoad( MetadataMessages );

		// Read frames offsets.
		Stream.ReadFramesOffsets( *FileReader );

		// Verify frames offsets.
		// Seeks to each recorded frame offset and reads the frame number stored
		// there; the value itself is discarded (read-only sanity pass).
		for( int32 FrameIndex = 0; FrameIndex < Stream.FramesInfo.Num(); ++FrameIndex )
		{
			const int64 FrameFileOffset = Stream.FramesInfo[FrameIndex].FrameFileOffset;
			FileReader->Seek( FrameFileOffset );
			int64 TargetFrame;
			*FileReader << TargetFrame;
		}
		// Rewind to the first frame before the real read pass.
		FileReader->Seek( Stream.FramesInfo[0].FrameFileOffset );

		// Read the raw stats messages.
		// Each frame is serialized as: frame number, packet count, then that
		// many FStatPackets. Packets accumulate into IncomingData (ownership of
		// the new'd packets is transferred to the Packets array).
		FStatPacketArray IncomingData;
		for( int32 FrameIndex = 0; FrameIndex < Stream.FramesInfo.Num(); ++FrameIndex )
		{
			int64 TargetFrame;
			*FileReader << TargetFrame;
			int32 NumPackets;
			*FileReader << NumPackets;
			for( int32 PacketIndex = 0; PacketIndex < NumPackets; PacketIndex++ )
			{
				FStatPacket* ToRead = new FStatPacket();
				Stream.ReadStatPacket( *FileReader, *ToRead, bIsFinalized );
				IncomingData.Packets.Add( ToRead );
			}
			FStatPacketArray NowData;
			// This is broken, do not use.
			// Exchange( NowData.Packets, IncomingData.Packets );
			// ScanForAdvance( NowData );
			// AddToHistoryAndEmpty( NowData );
			// check( !NowData.Packets.Num() );
		}
	}
	else
	{
		// Read the condensed stats messages.
		while( FileReader->Tell() < Size )
		{
			FStatMessage Read( Stream.ReadMessage( *FileReader ) );
			if( Read.NameAndInfo.GetField<EStatOperation>() == EStatOperation::SpecialMessageMarker )
			{
				// Simply break the loop.
				// The profiler supports more advanced handling of this message.
				break;
			}
			else if( Read.NameAndInfo.GetField<EStatOperation>() == EStatOperation::AdvanceFrameEventGameThread )
			{
				// Frame boundary: flush the accumulated messages for the frame
				// that just ended into the condensed history.
				ProcessMetaDataForLoad( Messages );
				if( CurrentGameFrame > 0 && Messages.Num() )
				{
					check( !CondensedStackHistory.Contains( CurrentGameFrame ) );
					// History takes ownership of the heap-allocated array;
					// Exchange leaves Messages empty for the next frame.
					TArray<FStatMessage>* Save = new TArray<FStatMessage>();
					Exchange( *Save, Messages );
					CondensedStackHistory.Add( CurrentGameFrame, Save );
					GoodFrames.Add( CurrentGameFrame );
				}
			}
			// Every message (including the frame-advance one) is appended via
			// placement-new into the per-frame accumulation array.
			new (Messages)FStatMessage( Read );
		}
		// meh, we will discard the last frame, but we will look for meta data
	}
}
// Commandlet entry point: gathers localizable text from asset packages.
// Reads all of its settings from a config file/section passed on the command
// line, builds a filtered list of package files (include/exclude path wildcards
// and excluded asset classes), harvests pre-gathered text straight from package
// headers where the file version allows it, and fully loads the remaining
// packages in batches of 100 for processing.
// Returns 0 on success, -1 on any configuration error.
int32 UGatherTextFromAssetsCommandlet::Main(const FString& Params)
{
	// Parse command line.
	TArray<FString> Tokens;
	TArray<FString> Switches;
	TMap<FString, FString> ParamVals;
	UCommandlet::ParseCommandLine(*Params, Tokens, Switches, ParamVals);

	//Set config file
	const FString* ParamVal = ParamVals.Find(FString(TEXT("Config")));
	FString GatherTextConfigPath;
	if ( ParamVal )
	{
		GatherTextConfigPath = *ParamVal;
	}
	else
	{
		UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("No config specified."));
		return -1;
	}

	//Set config section
	ParamVal = ParamVals.Find(FString(TEXT("Section")));
	FString SectionName;
	if ( ParamVal )
	{
		SectionName = *ParamVal;
	}
	else
	{
		UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("No config section specified."));
		return -1;
	}

	//Modules to Preload
	TArray<FString> ModulesToPreload;
	GetStringArrayFromConfig(*SectionName, TEXT("ModulesToPreload"), ModulesToPreload, GatherTextConfigPath);

	for (const FString& ModuleName : ModulesToPreload)
	{
		FModuleManager::Get().LoadModule(*ModuleName);
	}

	// IncludePathFilters
	TArray<FString> IncludePathFilters;
	GetPathArrayFromConfig(*SectionName, TEXT("IncludePathFilters"), IncludePathFilters, GatherTextConfigPath);

	// IncludePaths (DEPRECATED)
	// Old-style key is still honored: its entries are appended to the new list.
	{
		TArray<FString> IncludePaths;
		GetPathArrayFromConfig(*SectionName, TEXT("IncludePaths"), IncludePaths, GatherTextConfigPath);
		if (IncludePaths.Num())
		{
			IncludePathFilters.Append(IncludePaths);
			UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("IncludePaths detected in section %s. IncludePaths is deprecated, please use IncludePathFilters."), *SectionName);
		}
	}

	if (IncludePathFilters.Num() == 0)
	{
		UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("No include path filters in section %s."), *SectionName);
		return -1;
	}

	// ExcludePathFilters
	TArray<FString> ExcludePathFilters;
	GetPathArrayFromConfig(*SectionName, TEXT("ExcludePathFilters"), ExcludePathFilters, GatherTextConfigPath);

	// ExcludePaths (DEPRECATED)
	{
		TArray<FString> ExcludePaths;
		GetPathArrayFromConfig(*SectionName, TEXT("ExcludePaths"), ExcludePaths, GatherTextConfigPath);
		if (ExcludePaths.Num())
		{
			ExcludePathFilters.Append(ExcludePaths);
			UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("ExcludePaths detected in section %s. ExcludePaths is deprecated, please use ExcludePathFilters."), *SectionName);
		}
	}

	// PackageNameFilters
	TArray<FString> PackageFileNameFilters;
	GetStringArrayFromConfig(*SectionName, TEXT("PackageFileNameFilters"), PackageFileNameFilters, GatherTextConfigPath);

	// PackageExtensions (DEPRECATED)
	{
		TArray<FString> PackageExtensions;
		GetStringArrayFromConfig(*SectionName, TEXT("PackageExtensions"), PackageExtensions, GatherTextConfigPath);
		if (PackageExtensions.Num())
		{
			PackageFileNameFilters.Append(PackageExtensions);
			UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("PackageExtensions detected in section %s. PackageExtensions is deprecated, please use PackageFileNameFilters."), *SectionName);
		}
	}

	if (PackageFileNameFilters.Num() == 0)
	{
		UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("No package file name filters in section %s."), *SectionName);
		return -1;
	}

	//asset class exclude
	TArray<FString> ExcludeClasses;
	GetStringArrayFromConfig(*SectionName, TEXT("ExcludeClasses"), ExcludeClasses, GatherTextConfigPath);

	// Resolve the excluded class names via the asset registry and collect the
	// on-disk filenames of all assets of those classes so they can be skipped.
	FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked<FAssetRegistryModule>(TEXT("AssetRegistry"));
	AssetRegistryModule.Get().SearchAllAssets( true );
	FARFilter Filter;

	for(const auto& ExcludeClass : ExcludeClasses)
	{
		UClass* FilterClass = FindObject<UClass>(ANY_PACKAGE, *ExcludeClass);
		if(FilterClass)
		{
			Filter.ClassNames.Add( FilterClass->GetFName() );
		}
		else
		{
			UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("Invalid exclude class %s"), *ExcludeClass);
		}
	}

	TArray<FAssetData> AssetDataArray;
	AssetRegistryModule.Get().GetAssets(Filter, AssetDataArray);

	FString UAssetPackageExtension = FPackageName::GetAssetPackageExtension();
	TSet< FString > LongPackageNamesToExclude;
	for (int Index = 0; Index < AssetDataArray.Num(); Index++)
	{
		LongPackageNamesToExclude.Add( FPackageName::LongPackageNameToFilename( AssetDataArray[Index].PackageName.ToString(), UAssetPackageExtension ) );
	}

	//Get whether we should fix broken properties that we find.
	if (!GetBoolFromConfig(*SectionName, TEXT("bFixBroken"), bFixBroken, GatherTextConfigPath))
	{
		bFixBroken = false;
	}

	// Get whether we should gather editor-only data. Typically only useful for the localization of UE4 itself.
	if (!GetBoolFromConfig(*SectionName, TEXT("ShouldGatherFromEditorOnlyData"), ShouldGatherFromEditorOnlyData, GatherTextConfigPath))
	{
		ShouldGatherFromEditorOnlyData = false;
	}

	// Add any manifest dependencies if they were provided
	TArray<FString> ManifestDependenciesList;
	GetPathArrayFromConfig(*SectionName, TEXT("ManifestDependencies"), ManifestDependenciesList, GatherTextConfigPath);

	if( !ManifestInfo->AddManifestDependencies( ManifestDependenciesList ) )
	{
		UE_LOG(LogGatherTextFromAssetsCommandlet, Error, TEXT("The GatherTextFromAssets commandlet couldn't find all the specified manifest dependencies."));
		return -1;
	}

	//The main array of files to work from.
	TArray< FString > PackageFileNamesToProcess;

	// Diagnostic buckets for files rejected by each filter stage.
	TArray<FString> PackageFilesNotInIncludePath;
	TArray<FString> PackageFilesInExcludePath;
	TArray<FString> PackageFilesExcludedByClass;

	//Fill the list of packages to work from.
	uint8 PackageFilter = NORMALIZE_DefaultFlags;
	TArray<FString> Unused;
	for ( int32 PackageFilenameWildcardIdx = 0; PackageFilenameWildcardIdx < PackageFileNameFilters.Num(); PackageFilenameWildcardIdx++ )
	{
		// Class-based exclusion only applies to the "*.uasset" wildcard.
		const bool IsAssetPackage = PackageFileNameFilters[PackageFilenameWildcardIdx] == ( FString( TEXT("*") )+ FPackageName::GetAssetPackageExtension() );

		TArray<FString> PackageFiles;
		if ( !NormalizePackageNames( Unused, PackageFiles, PackageFileNameFilters[PackageFilenameWildcardIdx], PackageFilter) )
		{
			UE_LOG(LogGatherTextFromAssetsCommandlet, Display, TEXT("No packages found with extension %i: '%s'"), PackageFilenameWildcardIdx, *PackageFileNameFilters[PackageFilenameWildcardIdx]);
			continue;
		}
		else
		{
			UE_LOG(LogGatherTextFromAssetsCommandlet, Display, TEXT("Found %i packages with extension %i: '%s'"), PackageFiles.Num(), PackageFilenameWildcardIdx, *PackageFileNameFilters[PackageFilenameWildcardIdx]);
		}

		//Run through all the files found and add any that pass the include, exclude and filter constraints to OrderedPackageFilesToLoad
		for (FString& PackageFile : PackageFiles)
		{
			PackageFile = FPaths::ConvertRelativePathToFull(PackageFile);

			bool bExclude = false;

			//Ensure it matches the include paths if there are some.
			for (FString& IncludePath : IncludePathFilters)
			{
				bExclude = true;
				if( PackageFile.MatchesWildcard(IncludePath) )
				{
					bExclude = false;
					break;
				}
			}

			if ( bExclude )
			{
				PackageFilesNotInIncludePath.Add(PackageFile);
			}

			//Ensure it does not match the exclude paths if there are some.
			for (const FString& ExcludePath : ExcludePathFilters)
			{
				if (PackageFile.MatchesWildcard(ExcludePath))
				{
					bExclude = true;
					PackageFilesInExcludePath.Add(PackageFile);
					break;
				}
			}

			//Check that this is not on the list of packages that we don't care about e.g. textures.
			if ( !bExclude && IsAssetPackage && LongPackageNamesToExclude.Contains( PackageFile ) )
			{
				bExclude = true;
				PackageFilesExcludedByClass.Add(PackageFile);
			}

			//If we haven't failed one of the above checks, add it to the array of packages to process.
			if(!bExclude)
			{
				PackageFileNamesToProcess.Add(PackageFile);
			}
		}
	}

	if ( PackageFileNamesToProcess.Num() == 0 )
	{
		UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("No files found or none passed the include/exclude criteria."));
	}

	// The gather cache can be skipped via the command line or the config
	// section; the command-line switch takes precedence.
	bool bSkipGatherCache = FParse::Param(FCommandLine::Get(), TEXT("SkipGatherCache"));
	if (!bSkipGatherCache)
	{
		GetBoolFromConfig(*SectionName, TEXT("SkipGatherCache"), bSkipGatherCache, GatherTextConfigPath);
	}
	UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("SkipGatherCache: %s"), bSkipGatherCache ? TEXT("true") : TEXT("false"));

	// First pass: inspect each package's file summary (header only) and decide
	// whether it can be processed from the cached gatherable-text data in the
	// header, or must be fully loaded below.
	TArray< FString > PackageFileNamesToLoad;
	for (FString& PackageFile : PackageFileNamesToProcess)
	{
		TScopedPointer< FArchive > FileReader( IFileManager::Get().CreateFileReader( *PackageFile ) );
		if( FileReader )
		{
			// Read package file summary from the file
			FPackageFileSummary PackageFileSummary;
			(*FileReader) << PackageFileSummary;

			bool MustLoadForGather = false;

			// Have we been asked to skip the cache of text that exists in the header of newer packages?
			if (bSkipGatherCache && PackageFileSummary.GetFileVersionUE4() >= VER_UE4_SERIALIZE_TEXT_IN_PACKAGES)
			{
				// Fallback on the old package flag check.
				if (PackageFileSummary.PackageFlags & PKG_RequiresLocalizationGather)
				{
					MustLoadForGather = true;
				}
			}

			// NOTE(review): EditorVersion is fetched but never referenced below —
			// presumably left over from a removed version check; confirm before deleting.
			const FCustomVersion* const EditorVersion = PackageFileSummary.GetCustomVersionContainer().GetVersion(FEditorObjectVersion::GUID);

			// Packages not resaved since localization gathering flagging was added to packages must be loaded.
			if (PackageFileSummary.GetFileVersionUE4() < VER_UE4_PACKAGE_REQUIRES_LOCALIZATION_GATHER_FLAGGING)
			{
				MustLoadForGather = true;
			}
			// Package not resaved since gatherable text data was added to package headers must be loaded, since their package header won't contain pregathered text data.
			else if (PackageFileSummary.GetFileVersionUE4() < VER_UE4_SERIALIZE_TEXT_IN_PACKAGES)
			{
				// Fallback on the old package flag check.
				if (PackageFileSummary.PackageFlags & PKG_RequiresLocalizationGather)
				{
					MustLoadForGather = true;
				}
			}
			else if (PackageFileSummary.GetFileVersionUE4() < VER_UE4_DIALOGUE_WAVE_NAMESPACE_AND_CONTEXT_CHANGES)
			{
				// Packages containing dialogue waves from before the
				// namespace/context changes must be fully loaded to gather correctly.
				IAssetRegistry& AssetRegistry = AssetRegistryModule.Get();
				TArray<FAssetData> AssetDataInPackage;
				AssetRegistry.GetAssetsByPackageName(*FPackageName::FilenameToLongPackageName(PackageFile), AssetDataInPackage);
				for (const FAssetData& AssetData : AssetDataInPackage)
				{
					if (AssetData.AssetClass == UDialogueWave::StaticClass()->GetFName())
					{
						MustLoadForGather = true;
					}
				}
			}

			// Add package to list of packages to load fully and process.
			if (MustLoadForGather)
			{
				PackageFileNamesToLoad.Add(PackageFile);
			}
			// Process immediately packages that don't require loading to process.
			else if (PackageFileSummary.GatherableTextDataOffset > 0)
			{
				FileReader->Seek(PackageFileSummary.GatherableTextDataOffset);

				TArray<FGatherableTextData> GatherableTextDataArray;
				GatherableTextDataArray.SetNum(PackageFileSummary.GatherableTextDataCount);

				for (int32 GatherableTextDataIndex = 0; GatherableTextDataIndex < PackageFileSummary.GatherableTextDataCount; ++GatherableTextDataIndex)
				{
					(*FileReader) << GatherableTextDataArray[GatherableTextDataIndex];
				}

				ProcessGatherableTextDataArray(PackageFile, GatherableTextDataArray);
			}
		}
	}

	CollectGarbage(RF_NoFlags);

	//Now go through the remaining packages in the main array and process them in batches.
	int32 PackagesPerBatchCount = 100;
	TArray< UPackage* > LoadedPackages;
	TArray< FString > LoadedPackageFileNames;
	TArray< FString > FailedPackageFileNames;
	TArray< UPackage* > LoadedPackagesToProcess;

	const int32 PackageCount = PackageFileNamesToLoad.Num();
	const int32 BatchCount = PackageCount / PackagesPerBatchCount + (PackageCount % PackagesPerBatchCount > 0 ? 1 : 0); // Add an extra batch for any remainder if necessary
	if(PackageCount > 0)
	{
		UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("Loading %i packages in %i batches of %i."), PackageCount, BatchCount, PackagesPerBatchCount);
	}
	FLoadPackageLogOutputRedirector LogOutputRedirector;

	//Load the packages in batches
	int32 PackageIndex = 0;
	for( int32 BatchIndex = 0; BatchIndex < BatchCount; ++BatchIndex )
	{
		int32 PackagesInThisBatch = 0;
		int32 FailuresInThisBatch = 0;
		// PackageIndex deliberately persists across batches.
		for( ; PackageIndex < PackageCount && PackagesInThisBatch < PackagesPerBatchCount; ++PackageIndex )
		{
			FString PackageFileName = PackageFileNamesToLoad[PackageIndex];

			UE_LOG(LogGatherTextFromAssetsCommandlet, Verbose, TEXT("Loading package: '%s'."), *PackageFileName);

			UPackage *Package = nullptr;
			{
				FString LongPackageName;
				if (!FPackageName::TryConvertFilenameToLongPackageName(PackageFileName, LongPackageName))
				{
					LongPackageName = FPaths::GetCleanFilename(PackageFileName);
				}

				// Capture load-time log spam and attribute it to this package.
				FLoadPackageLogOutputRedirector::FScopedCapture ScopedCapture(&LogOutputRedirector, LongPackageName);
				Package = LoadPackage( NULL, *PackageFileName, LOAD_NoWarn | LOAD_Quiet );
			}

			if( Package )
			{
				LoadedPackages.Add(Package);
				LoadedPackageFileNames.Add(PackageFileName);

				// Because packages may not have been resaved after this flagging was implemented, we may have added packages to load that weren't flagged - potential false positives.
				// The loading process should have reflagged said packages so that only true positives will have this flag.
				if( Package->RequiresLocalizationGather() )
				{
					LoadedPackagesToProcess.Add( Package );
				}
			}
			else
			{
				FailedPackageFileNames.Add( PackageFileName );
				++FailuresInThisBatch;
				continue;
			}

			++PackagesInThisBatch;
		}

		UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("Loaded %i packages in batch %i of %i. %i failed."), PackagesInThisBatch, BatchIndex + 1, BatchCount, FailuresInThisBatch);

		ProcessPackages(LoadedPackagesToProcess);
		LoadedPackagesToProcess.Empty(PackagesPerBatchCount);

		if( bFixBroken )
		{
			// Resave any packages dirtied by the gather, e.g. those that got
			// their localization flags fixed up during load.
			for( int32 LoadedPackageIndex=0; LoadedPackageIndex < LoadedPackages.Num() ; ++LoadedPackageIndex )
			{
				UPackage *Package = LoadedPackages[LoadedPackageIndex];
				const FString PackageName = LoadedPackageFileNames[LoadedPackageIndex];

				//Todo - link with source control.
				if( Package )
				{
					if( Package->IsDirty() )
					{
						if( SavePackageHelper( Package, *PackageName ) )
						{
							UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("Saved Package %s."),*PackageName);
						}
						else
						{
							//TODO - Work out how to integrate with source control. The code from the source gatherer doesn't work.
							UE_LOG(LogGatherTextFromAssetsCommandlet, Log, TEXT("Could not save package %s. Probably due to source control. "),*PackageName);
						}
					}
				}
				else
				{
					UE_LOG(LogGatherTextFromAssetsCommandlet, Warning, TEXT("Failed to find one of the loaded packages."));
				}
			}
		}

		// Free the batch's packages before loading the next batch.
		CollectGarbage(RF_NoFlags);
		LoadedPackages.Empty(PackagesPerBatchCount);
		LoadedPackageFileNames.Empty(PackagesPerBatchCount);
	}

	return 0;
}
/*
 * Driver for a first-fit memory-allocation / process-scheduling simulation.
 * Reads the process workload from "teste.txt", writes a step-by-step trace to
 * "OutputFIRST.txt", and runs the simulation loop until the ready, blocked and
 * in-memory process lists are all empty. Returns 0 on success; exits with
 * status 1 if either file cannot be opened.
 *
 * NOTE(review): tempoTotal is used but not declared here — presumably a global
 * defined elsewhere in this file/project; confirm.
 */
int main(void){
	ListOfProcesses *LReady;     /* processes waiting for memory (ready queue) */
	ListOfProcesses *LBlock;     /* processes blocked on IO */
	ListOfProcesses *LFinished;  /* processes that have completed */
	Memory *Mmemory;             /* simulated memory map */
	FILE *pEntry;                /* input workload file */
	FILE *pOut;                  /* output trace file */
	int i;
	int counter = 0;             /* number of iterations of the main loop */
	int retRunning;              /* status returned by RunningProcess each tick */
	int TamReady = 0;            /* ready-list capacity, filled in by FileReader */

	tempoTotal = 0;

	pOut = fopen("OutputFIRST.txt", "w");
	if (pOut == NULL){
		printf("Erro ao tentar abrir o arquivo!\n");
		exit(1);
	}

	pEntry = fopen("teste.txt", "r");
	if (pEntry == NULL){
		printf("Erro ao tentar abrir o arquivo!\n");
		exit(1);
	}

	/* Build all the simulation structures from the input workload. */
	LReady = FileReader(pEntry, &TamReady);
	Mmemory = MemoryCreator();
	LBlock = BlockCreator(TamReady);
	LFinished = FinishedCreator(TamReady);
	Mmemory->LMemory->quantity = 0;

	for (i = 0; i < LReady->quantity; i++){
		PrintProcess(&(LReady->proc[i]), pOut);
	}

	/* Main loop: run until no process remains in memory, ready or blocked. */
	while ((Mmemory->LMemory->quantity + LReady->quantity + LBlock->quantity) != 0){
		/* Admit ready processes into memory while the head of the queue fits
		   (order == -1 marks an empty/invalid slot). */
		while ((CheckMemory(Mmemory) >= LReady->proc[0].Memory) && (LReady->proc[0].order != -1)){
			ReadyToMemory(LReady, Mmemory);
			OrganizeList(LReady);
			fprintf(pOut, "\n-----------------------Mapa de Alocação de Memória-----------------------\n");
			PrintMemory(Mmemory, pOut);
			fprintf(pOut, "-----------------------Fim Mapa de Alocação de Memória-----------------------\n");
		}

		/* One simulation tick: run, unblock, and move processes around. */
		counter++;
		retRunning = RunningProcess(Mmemory, LBlock, pOut);
		BlockToReady(LReady, LBlock);
		ProcessToEverywhere(Mmemory, LBlock, LReady, LFinished, retRunning);

		PrintMemory(Mmemory, pOut);
		fprintf(pOut, "\n-----------------------Processos na Fila de Prontos-----------------------\n");
		PrintListOfProcesses(LReady, pOut);
		fprintf(pOut, "\n-----------------------Processos em IO-----------------------\n");
		PrintListOfProcesses(LBlock, pOut);
		fprintf(pOut, "\n-----------------------Processos Já Terminados-----------------------\n");
		PrintListOfProcesses(LFinished, pOut);

		/* Drain remaining IO: if only blocked processes are left, run their IO
		   down to zero and retire them so the outer loop can terminate. */
		if ((Mmemory->LMemory->quantity == 0) && (LReady->quantity == 0) && (LBlock->quantity != 0)){
			for (i = 0; i < LBlock->quantity; i++){
				while (LBlock->proc[i].TimeIO[0] != 0){
					ExecutaBloqueados(LBlock);
					Mmemory->LMemory->quantity = 1;
				}
				LBlock->proc[i].order = -1;
				LBlock->quantity = LBlock->quantity - 1;
			}
		}
	}

	/* Final report: elapsed simulated time and loop-iteration count. */
	printf("Tempo Decorrido: %ds\n", tempoTotal);
	fprintf(pOut, "Tempo Total Decorrido: %d\n", tempoTotal);
	printf("Counter: %d\n", counter);
	fprintf(pOut, "Numero de iterações do while: %d\n", counter);

	/* Release all simulation structures and close both files. */
	free(LReady->proc);
	free(LBlock->proc);
	free(LFinished->proc);
	free(LReady);
	free(LBlock);
	free(LFinished);
	free(Mmemory->LMemory->proc);
	free(Mmemory->LMemory);
	free(Mmemory);
	fclose(pEntry);
	fclose(pOut);
	return 0;
}