static void dumpStatements( void ) {
/**********************************/
/* loop through statements and create new sru file by dumping them out */

    FILE        *fout;
    statement   *curr;

    /* open file */
    fout = WigOpenFile( GetOutputFile(), "wt" );
    if( !fout ) {
        Error( FILE_OPEN_ERR, GetOutputFile() );
    }

    /* loop and dump */
    curr = SRU.head_stmt;
    while( curr ) {
        assert( curr->stmt );
        fprintf( fout, "%s", curr->stmt );
        SRU.head_stmt = curr->next;
        if( curr->keep ) {
            curr = curr->next;
        } else {
            /* free if not required for back end */
            freeStatement( curr );
            curr = SRU.head_stmt;
        }
    }
    WigCloseFile( fout );
}
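/*
 * A minimal sketch of the freeStatement() helper called above -- not the real
 * implementation, which is not shown here.  It assumes a node shaped like the
 * fields dumpStatements() touches (stmt text, next link, keep flag) and plain
 * malloc/free allocation; both the layout and the allocator are assumptions.
 */
#include <stdlib.h>

typedef struct statement {
    char             *stmt;     /* statement text written to the .sru file */
    struct statement *next;     /* next statement in the list */
    int               keep;     /* nonzero if the back end still needs it */
} statement;

static void freeStatement( statement *curr )
{
    free( curr->stmt );         /* assumed heap-allocated text buffer */
    free( curr );               /* release the node itself */
}
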
bool UParticleSystemAuditCommandlet::DumpSimpleSet(TSet<FString>& InSet, const TCHAR* InShortFilename, const TCHAR* InObjectClassName)
{
    if (InSet.Num() > 0)
    {
        check(InShortFilename != NULL);
        check(InObjectClassName != NULL);

        FArchive* OutputStream = GetOutputFile(InShortFilename);
        if (OutputStream != NULL)
        {
            UE_LOG(LogParticleSystemAuditCommandlet, Log, TEXT("Dumping '%s' results..."), InShortFilename);

            OutputStream->Logf(TEXT("%s,..."), InObjectClassName);
            for (TSet<FString>::TIterator DumpIt(InSet); DumpIt; ++DumpIt)
            {
                FString ObjName = *DumpIt;
                OutputStream->Logf(TEXT("%s"), *ObjName);
            }

            OutputStream->Close();
            delete OutputStream;
        }
        else
        {
            return false;
        }
    }
    return true;
}
bool EclipseThemeImporterBase::FinalizeImport(wxXmlNode* propertiesNode)
{
    AddCommonProperties(propertiesNode);
    wxString codeliteXmlFile =
        wxFileName(clStandardPaths::Get().GetUserLexersDir(), GetOutputFile(m_langName)).GetFullPath();

    // Update the lexer colours
    LexerConf::Ptr_t lexer(new LexerConf);
    lexer->FromXml(m_codeliteDoc.GetRoot());
    ColoursAndFontsManager::Get().UpdateLexerColours(lexer, true);

    wxXmlNode* xmlnode = lexer->ToXml();
    m_codeliteDoc.SetRoot(xmlnode);

    // Save the lexer to xml
    return ::SaveXmlToFile(&m_codeliteDoc, codeliteXmlFile);
}
// build generator - needs unique name (checked by ModelBuilder)
// and optional count, which defaults to special "all rows" value

Generator * GeneratorTag :: FromXML( const ALib::XMLElement * e ) {

    RequireChildren( e );
    AllowAttrs( e, AttrList( NAME_ATTR, COUNT_ATTRIB, GROUP_ATTR,
                             DEBUG_ATTRIB, HIDE_ATTR, OUT_ATTRIB,
                             FNAMES_ATTR, 0 ) );

    string name = e->HasAttr( NAME_ATTR ) ? e->AttrValue( NAME_ATTR ) : "";
    int count = GetCount( e );
    bool debug = GetBool( e, DEBUG_ATTRIB, NO_STR );
    FieldList grp( e->AttrValue( GROUP_ATTR, "" ) );
    string ofn = GetOutputFile( e );
    string fields = e->AttrValue( FNAMES_ATTR, "" );

    std::auto_ptr <GeneratorTag> g(
        new GeneratorTag( name, count, debug, ofn, fields, grp ) );
    g->AddSources( e );
    return g.release();
}
void EdiComposer::BufferFlush()
{
    GetOutputFile().Write(GetBuffer(), m_bufferSize);
    GetOutputFile().Write((void*)"\r\n", 2);
}
int main(int argc, char **argv)
{
    srand(time(NULL));

    std::string xmlDocFile = "/home/elli/Documents/colorferet/dvd1/data/ground_truths/xml/recordings.xml";
    std::string basePath = "/home/elli/Documents/colorferet/";

    std::ofstream* outputTrainingFilestream = GetOutputFile(true);
    std::ofstream* outputTestFilestream = GetOutputFile(false);

    pugi::xml_document doc;
    if (!doc.load_file(xmlDocFile.c_str()))
    {
        std::cerr << "Failed to load XML doc \'" << xmlDocFile << "\'" << std::endl;
        exit(-1);
    }

    std::list<Recording> recordingList;
    pugi::xml_node recordingsNode = doc.child("Recordings");
    pugi::xml_object_range<pugi::xml_named_node_iterator> recordingsIterator = doc.child("Recordings").children("Recording");
    for (pugi::xml_named_node_iterator it = recordingsIterator.begin(); it != recordingsIterator.end(); it++)
    {
        std::string fileRoot = (*it).child("URL").attribute("root").value();
        std::string filePathWithExtension = (*it).child("URL").attribute("relative").value();
        std::string filePath = filePathWithExtension.substr(0, filePathWithExtension.length() - 4);
        std::string subject = (*it).child("Subject").attribute("id").value();

        Recording newRecording = Recording(subject, basePath, fileRoot, filePath);
        if (newRecording.GetSubjectId() < 5)
        {
            // to keep this small, only going to keep the first 45 ppl
            recordingList.push_back(newRecording);
        }
    }

    int width = 32;
    int height = 48;
    std::cout << recordingList.size() << std::endl;
    cv::Size desiredImageSize(width, height);

    int counter = 0;
    int trainingCount = 0;
    int testingCount = 0;
    int totalCounts = recordingList.size() * recordingList.size();
    int proposedTrainingCount = (double) totalCounts * 0.7;
    std::cout << proposedTrainingCount << std::endl;
    int proposedTestingCount = totalCounts - proposedTrainingCount;

    (*outputTestFilestream) << proposedTestingCount << " " << width * height * 2 << " " << 1 << std::endl;
    (*outputTrainingFilestream) << proposedTrainingCount << " " << width * height * 2 << " " << 1 << std::endl;
    (*outputTestFilestream).setf(std::ios::fixed);
    (*outputTestFilestream) << std::setprecision(3);
    (*outputTrainingFilestream).setf(std::ios::fixed);
    (*outputTrainingFilestream) << std::setprecision(3);

    for (std::list<Recording>::iterator itOuter = recordingList.begin(); itOuter != recordingList.end(); itOuter++)
    {
        counter++;
        if (counter % 10 == 0)
        {
            std::cout << counter << std::endl;
        }

        cv::Mat currentImage = cv::imread(itOuter->GetFilePath(), cv::IMREAD_GRAYSCALE);
        cv::Mat resizedCurrentImage;
        cv::resize(currentImage, resizedCurrentImage, desiredImageSize, 0, 0, cv::INTER_AREA);
        //std::cout << "Current Size: " << resizedCurrentImage.size() << std::endl;

        for (std::list<Recording>::iterator itInner = recordingList.begin(); itInner != recordingList.end(); itInner++)
        {
            cv::Mat currentInnerImage = cv::imread(itInner->GetFilePath(), cv::IMREAD_GRAYSCALE);
            cv::Mat resizedInnerImage;
            cv::resize(currentInnerImage, resizedInnerImage, desiredImageSize, 0, 0, cv::INTER_AREA);

            std::ofstream* outputFilestream;
            double myRand = ((double) rand() / (RAND_MAX));
            if ((myRand < 0.7 && trainingCount < proposedTrainingCount) || testingCount >= proposedTestingCount)
            {
                outputFilestream = outputTrainingFilestream;
                trainingCount++;
            }
            else
            {
                outputFilestream = outputTestFilestream;
                testingCount++;
            }

            for (int x = 0; x < width; x++)
            {
                for (int y = 0; y < height; y++)
                {
                    // cv::Mat::at takes (row, col), i.e. (y, x)
                    cv::Scalar intensity = resizedCurrentImage.at<uchar>(y, x);
                    (*outputFilestream) << (double) intensity[0] / 255 << " ";
                }
            }
            for (int x = 0; x < width; x++)
            {
                for (int y = 0; y < height; y++)
                {
                    cv::Scalar intensity = resizedInnerImage.at<uchar>(y, x);
                    (*outputFilestream) << (double) intensity[0] / 255 << " ";
                }
            }

            if (itOuter->GetSubjectId() == itInner->GetSubjectId())
            {
                (*outputFilestream) << 1;
            }
            else
            {
                (*outputFilestream) << 0;
            }
            (*outputFilestream) << std::endl;
        }
    }

    std::cout << trainingCount << " " << testingCount << std::endl;
    outputTestFilestream->close();
    outputTrainingFilestream->close();
}
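// A minimal sketch of the GetOutputFile(bool) helper the program above calls;
// the real implementation is not shown, so the file names and open mode below
// are assumptions chosen only to match the call sites (heap-allocated
// std::ofstream*, true selects the training file, false the test file).
#include <cstdlib>
#include <fstream>
#include <iostream>

std::ofstream* GetOutputFile(bool isTraining)
{
    const char* path = isTraining ? "training_pairs.txt"  // hypothetical file name
                                  : "test_pairs.txt";     // hypothetical file name
    std::ofstream* out = new std::ofstream(path, std::ios::out | std::ios::trunc);
    if (!out->is_open())
    {
        std::cerr << "Failed to open output file '" << path << "'" << std::endl;
        exit(-1);
    }
    return out;
}
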
int main(int argc, char **argv)
{
    std::cout << "Hello, welcome to NeuralNetwork Testing program!!" << std::endl;

    ThreeLayerNetwork initialNetwork = ThreeLayerNetwork::GetNewNetworkFromFile(); //LoadInitialNetwork();
    ExampleContainer testExamples = LoadTestingExamples();

    std::fstream* outputFile = GetOutputFile();
    (*outputFile).setf(std::ios::fixed);
    (*outputFile) << std::setprecision(3);

    std::list<Example> examples = testExamples.GetExamples();

    std::vector<int> A (testExamples.GetOutputSize(), 0); // expected 1, predicted 1
    std::vector<int> B (testExamples.GetOutputSize(), 0); // expected 0, predicted 1
    std::vector<int> C (testExamples.GetOutputSize(), 0); // expected 1, predicted 0
    std::vector<int> D (testExamples.GetOutputSize(), 0); // expected 0, predicted 0

    std::list<Example>::iterator ex;
    for (ex = examples.begin(); ex != examples.end(); ex++)
    {
        initialNetwork.PropogateForward((*ex));
        std::vector<bool> outputs = initialNetwork.GetOutputs();
        std::vector<bool> expectedOutputs = ex->GetOutputs();
        if (outputs.size() != expectedOutputs.size())
        {
            std::cerr << "Something weird happened - expectedOutputs size is not the same as the outputs size - this should have been ensured elsewhere!" << std::endl;
            exit(-1);
        }

        for (int i = 0; i < outputs.size(); i++)
        {
            if (expectedOutputs[i] == true) // expected 1
            {
                if (outputs[i] == true) // predicted 1
                {
                    A[i]++;
                }
                else // predicted 0
                {
                    C[i]++;
                }
            }
            else // expected 0
            {
                if (outputs[i] == true) // predicted 1
                {
                    B[i]++;
                }
                else // predicted 0
                {
                    D[i]++;
                }
            }
        }
    }

    std::vector<double> OverallAccuracy (testExamples.GetOutputSize(), 0);
    std::vector<double> Precision (testExamples.GetOutputSize(), 0);
    std::vector<double> Recall (testExamples.GetOutputSize(), 0);
    std::vector<double> F1 (testExamples.GetOutputSize(), 0);

    for (int i = 0; i < testExamples.GetOutputSize(); i++)
    {
        OverallAccuracy[i] = ((double) A[i] + D[i]) / (A[i] + B[i] + C[i] + D[i]);
        Precision[i] = (double) A[i] / (A[i] + B[i]);
        Recall[i] = (double) A[i] / (A[i] + C[i]);
        F1[i] = (2 * Precision[i] * Recall[i]) / (Precision[i] + Recall[i]);
    }

    for (int i = 0; i < testExamples.GetOutputSize(); i++)
    {
        (*outputFile) << A[i] << " " << B[i] << " " << C[i] << " " << D[i] << " "
                      << OverallAccuracy[i] << " " << Precision[i] << " " << Recall[i] << " " << F1[i] << std::endl;
    }

    int microA = std::accumulate(A.begin(), A.end(), 0);
    int microB = std::accumulate(B.begin(), B.end(), 0);
    int microC = std::accumulate(C.begin(), C.end(), 0);
    int microD = std::accumulate(D.begin(), D.end(), 0);
    double microOverallAccuracy = ((double) microA + microD) / (microA + microB + microC + microD);
    double microPrecision = (double) microA / (microA + microB);
    double microRecall = (double) microA / (microA + microC);
    double microF1 = (2 * microPrecision * microRecall) / (microPrecision + microRecall);

    // use a double initial value so the summed accuracies are not truncated to int
    std::cout << std::accumulate(OverallAccuracy.begin(), OverallAccuracy.end(), 0.0) << std::endl;

    double macroOverallAccuracy = std::accumulate(OverallAccuracy.begin(), OverallAccuracy.end(), 0.0) / (double) OverallAccuracy.size();
    double macroPrecision = std::accumulate(Precision.begin(), Precision.end(), 0.0) / (double) Precision.size();
    double macroRecall = std::accumulate(Recall.begin(), Recall.end(), 0.0) / (double) Recall.size();
    double macroF1 = (2 * macroPrecision * macroRecall) / (macroPrecision + macroRecall);

    (*outputFile) << microOverallAccuracy << " " << microPrecision << " " << microRecall << " " << microF1 << std::endl;
    (*outputFile) << macroOverallAccuracy << " " << macroPrecision << " " << macroRecall << " " << macroF1 << std::endl;

    outputFile->close();
}