int main(int argc, char* argv[]) {
  Env = TEnv(argc, argv, TNotify::StdNotify);
  Env.PrepArgs(TStr::Fmt("Rolx. build: %s, %s. Time: %s", __TIME__, __DATE__, TExeTm::GetCurTm()));
  TExeTm ExeTm;
  Try
  const TStr InFNm = Env.GetIfArgPrefixStr("-i:", "graph.txt", "Input graph (one edge per line, tab/space separated)");
  const TStr OutFNm = Env.GetIfArgPrefixStr("-o:", "roles.txt", "Output file name prefix");
  const int MinRoles = Env.GetIfArgPrefixInt("-l:", 2, "Lower bound of the number of roles");
  const int MaxRoles = Env.GetIfArgPrefixInt("-u:", 3, "Upper bound of the number of roles");
  double Threshold = 1e-6;
  if (MinRoles > MaxRoles || MinRoles < 2) {
    printf("invalid arguments: 2 <= MinRoles <= MaxRoles is required\n");
    exit(EXIT_FAILURE);
  }
  printf("loading file...\n");
  PNGraph Graph = TSnap::LoadEdgeList<PNGraph>(InFNm, 0, 1);
  // Extract structural features for every node and arrange them into the matrix V.
  printf("extracting features...\n");
  TIntFtrH Features = ExtractFeatures(Graph);
  TIntIntH NodeIdMtxIdH = CreateNodeIdMtxIdxHash(Features);
  TFltVV V = ConvertFeatureToMatrix(Features, NodeIdMtxIdH);
  //printf("saving features...\n");
  //FPrintMatrix(V, "v.txt");
  //printf("feature matrix is saved in v.txt\n");
  TFlt MnError = TFlt::Mx;
  TFltVV FinalG, FinalF;
  int NumRoles = -1;
  // Factorize the feature matrix for each candidate role count and keep the
  // model with the smallest description length.
  for (int r = MinRoles; r <= MaxRoles; ++r) {
    TFltVV G, F;
    printf("factorizing for %d roles...\n", r);
    CalcNonNegativeFactorization(V, r, G, F, Threshold);
    //FPrintMatrix(G, "g.txt");
    //FPrintMatrix(F, "f.txt");
    TFlt Error = CalcDescriptionLength(V, G, F);
    if (Error < MnError) {
      MnError = Error;
      FinalG = G;
      FinalF = F;
      NumRoles = r;
    }
  }
  //FPrintMatrix(FinalG, "final_g.txt");
  //FPrintMatrix(FinalF, "final_f.txt");
  printf("using %d roles, min error: %f\n", NumRoles, MnError());
  // Assign a role to each node from the node-role matrix G and write the result.
  TIntIntH Roles = FindRoles(FinalG, NodeIdMtxIdH);
  FPrintRoles(Roles, OutFNm);
  //PlotRoles(Graph, Roles);
  Catch
  printf("\nrun time: %s (%s)\n", ExeTm.GetTmStr(), TSecTm::GetCurTm().GetTmStr().CStr());
  return 0;
}
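// ---------------------------------------------------------------------------
// The loop above picks the number of roles by minimizing the value returned by
// CalcDescriptionLength(). As a rough illustration of such a criterion, the
// standalone sketch below charges a fixed cost per stored factor entry plus the
// squared reconstruction error of V ~= G * F. The names Mtx and
// SketchDescriptionLength are illustrative only, and the real implementation
// may quantize the factors and measure the error differently (e.g., with a
// KL-divergence term), so treat this as an assumption-laden sketch rather than
// the actual CalcDescriptionLength().
// ---------------------------------------------------------------------------
#include <cstddef>
#include <vector>

typedef std::vector<std::vector<double> > Mtx;

// V: n x f feature matrix, G: n x r node-role matrix, F: r x f role-feature matrix.
static double SketchDescriptionLength(const Mtx& V, const Mtx& G, const Mtx& F) {
  if (V.empty() || F.empty()) { return 0.0; }
  const std::size_t n = V.size(), f = V[0].size(), r = F.size();
  // Model cost: a fixed number of bits per stored factor entry.
  const double ModelCost = 64.0 * double(n * r + r * f);
  // Error cost: squared reconstruction error of V ~= G * F.
  double ErrCost = 0.0;
  for (std::size_t i = 0; i < n; ++i) {
    for (std::size_t j = 0; j < f; ++j) {
      double Vij = 0.0;
      for (std::size_t k = 0; k < r; ++k) { Vij += G[i][k] * F[k][j]; }
      const double d = V[i][j] - Vij;
      ErrCost += d * d;
    }
  }
  // More roles shrink ErrCost but grow ModelCost; the minimum balances the two.
  return ModelCost + ErrCost;
}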
void NetworkTemporal3BCPNN::TrainLayer(const vector<vector<float> >& trainingData,
                                       PopulationColumns* inputLayer,
                                       StructureMIMDSVQ* structure,
                                       int iterationsCorrs, int iterationsMDS,
                                       int iterationsVQ, int iterationsFeatures) {
  // Training phase
  int nrTrainImages = trainingData.size();

  // Disable all learning/analysis modules and recording before training starts.
  structure->MDSHypercolumns()->SwitchOnOff(false);
  structure->MDS()->SwitchOnOff(false);
  structure->VQ()->SwitchOnOff(false);
  structure->CSLLearn()->SwitchOnOff(false);
  structure->GetLayer(1)->SwitchOnOff(false);
  structure->SetRecording(false);

  // Semi-sequential version
  // 1. Training phase
  // 1A. Patches creation
  structure->CSLLearn()->SetMaxPatterns(nrTrainImages);
  structure->CSLLearn()->SetEta(0.001);

  // Turn off responses in the 2nd layer during the initial training phase for speed.
  structure->GetLayer(1)->SwitchOnOff(false);

  // Stage 1: accumulate correlations over the training data.
  int j = 0;
  while (j < iterationsCorrs) {
    //if(j==(int)iterationsPatches*0.8)
    //  break;
    //for(int i=0;i<trainingData.size();i++) {
    ComputeCorrelation(trainingData, inputLayer, structure, j);
    j++;
  }

  // Stage 2: multidimensional scaling; stops early if ComputeMDS reports it is done.
  j = 0;
  while (j < iterationsMDS) {
    //if(j==(int)iterationsPatches*0.9)
    //  break;
    //for(int i=0;i<trainingData.size();i++) {
    if (!ComputeMDS(trainingData, inputLayer, structure, j))
      break;
    j++;
  }

  // Re-enable the 2nd layer for vector quantization and feature extraction.
  structure->GetLayer(1)->SwitchOnOff(true);

  // Stage 3: vector quantization.
  j = 0;
  while (j < iterationsVQ) {
    //for(int i=0;i<trainingData.size();i++) {
    if (m_verbose && this->MPIGetNodeId() == 0)
      cout << "DataPoint (patches): " << j << endl;
    ComputeVQ(trainingData, inputLayer, structure, j);
    j++;
  }

  structure->CSLLearn()->SetMaxPatterns(nrTrainImages);

  // Stage 4: feature extraction; stops early if ExtractFeatures reports it is done.
  j = 0;
  while (j < iterationsFeatures) {
    //for(int i=0;i<trainingData.size();i++) {
    if (m_verbose && this->MPIGetNodeId() == 0)
      cout << "DataPoint (features): " << j << endl;
    if (!ExtractFeatures(trainingData, inputLayer, structure))
      break;
    //}
    j++;
  }

  this->RecordAll();
}
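// ---------------------------------------------------------------------------
// TrainLayer() above runs four training stages in sequence (correlations, MDS,
// VQ, feature extraction), each with its own iteration budget and an early
// break when a stage reports it is finished. The standalone sketch below shows
// only that control-flow pattern; RunPhase, PhaseSketchDemo and the lambda
// stand-ins are hypothetical and are not part of the NetworkTemporal3BCPNN /
// StructureMIMDSVQ API.
// ---------------------------------------------------------------------------
#include <functional>
#include <iostream>
#include <string>

static void RunPhase(const std::string& name, int maxIter,
                     const std::function<bool(int)>& step) {
  for (int j = 0; j < maxIter; ++j) {
    if (!step(j)) {  // the stage signals completion by returning false
      std::cout << name << " stopped early at iteration " << j << std::endl;
      return;
    }
  }
  std::cout << name << " ran the full " << maxIter << " iterations" << std::endl;
}

static void PhaseSketchDemo() {
  // Hypothetical stand-ins for the correlation / MDS / VQ / feature stages.
  RunPhase("corrs",    100, [](int)   { return true; });
  RunPhase("MDS",      100, [](int j) { return j < 40; });  // converges early
  RunPhase("VQ",        50, [](int)   { return true; });
  RunPhase("features",  50, [](int j) { return j < 10; });
}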