bool CommandLineArgument::ParseCommandLineArgument(int argc, char** argv)
{
  if (!ParseCommandLine(argc, argv)) {
    return false;
  }

  action_parameters_ = working_folder + "\\action.txt";
  action_parameters_debug_ = working_folder + "\\action_debug.txt";
  action_parameters_png_ = working_folder + "\\action.png";
  grammar_ = working_folder + "\\grammar.json";
  /*grid_header_ = working_folder + "\\grid_header.txt";
  grid_wall_png_ = working_folder + "\\grid_wall.png";
  grid_wall_txt_ = working_folder + "\\grid_wall.txt";
  grid_window_png_ = working_folder + "\\grid_window.png";
  grid_window_txt_ = working_folder + "\\grid_window.txt";*/
  grid_ = working_folder + "\\grid_";
  original_result_overlap_png_ = working_folder + "\\compare_result.png";
  parameters_ = working_folder + "\\configure.json";
  pt_pcd_ = working_folder + "\\pt.pcd";
  qtable_ = working_folder + "\\qtable.txt";
  raw_pcd_ = working_folder + "\\raw.pcd";
  raw_txt_ = working_folder + "\\raw.txt";
  result_png_ = working_folder + "\\result.png";
  result_txt_ = working_folder + "\\result.txt";
  /*revise_grid_header_ = working_folder + "\\revise_grid_header.txt";
  revise_grid_wall_png_ = working_folder + "\\revise_grid_wall.png";
  revise_grid_wall_txt_ = working_folder + "\\revise_grid_wall.txt";
  revise_grid_window_png_ = working_folder + "\\revise_grid_window.png";
  revise_grid_window_txt_ = working_folder + "\\revise_grid_window.txt";*/
  revise_grid_ = working_folder + "\\revise_grid_";
  rt_pcd_ = working_folder + "\\rt.pcd";

  return ReadParameters();
}
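/* A minimal usage sketch (hypothetical driver, not from the original project).
 * It assumes only what the method above shows: ParseCommandLine() fills
 * working_folder from argv before the derived paths are composed, and
 * ReadParameters() then loads configure.json from that folder. */
bool RunFromCommandLine(int argc, char** argv)
{
  CommandLineArgument arguments;
  if (!arguments.ParseCommandLineArgument(argc, argv)) {
    return false;   // bad command line, or configure.json could not be read
  }
  // ... hand the prepared file paths (grid_, result_png_, ...) to the pipeline ...
  return true;
}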
SparseHieroReorderingFeature::SparseHieroReorderingFeature(const std::string &line)
  :StatelessFeatureFunction(0, line),
   m_type(SourceCombined),
   m_sourceFactor(0),
   m_targetFactor(0),
   m_sourceVocabFile(""),
   m_targetVocabFile("")
{
  /*
    Configuration of features.
      factor      - Which factor should it apply to
      type        - what type of sparse reordering feature. e.g. block
                    (modelled on Matthias Huck's EAMT 2012 features)
      word        - which words to include, e.g. src_bdry, src_all, tgt_bdry , ...
      vocab       - vocab file to limit it to
      orientation - e.g. lr, etc.
  */
  cerr << "Constructing a Sparse Reordering feature" << endl;
  ReadParameters();
  m_otherFactor = FactorCollection::Instance().AddFactor("##OTHER##");
  LoadVocabulary(m_sourceVocabFile, m_sourceVocab);
  LoadVocabulary(m_targetVocabFile, m_targetVocab);
}
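/* Hedged sketch of what LoadVocabulary() above plausibly does (an assumption,
 * not code from the source): read one surface form per line and intern it via
 * FactorCollection, so later lookups can compare Factor pointers.  The concrete
 * vocabulary container type is also assumed here. */
void LoadVocabularySketch(const std::string &vocabFile, std::set<const Factor*> &vocab)
{
  if (vocabFile.empty()) return;   // an empty name means "no vocabulary restriction"
  std::ifstream in(vocabFile.c_str());
  std::string word;
  while (in >> word) {
    vocab.insert(FactorCollection::Instance().AddFactor(word));
  }
}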
void QualityOfServiceData::Init()
{
    ReadParameters();

    if (!IsStandalone())
    {
        Dob::Typesystem::TypeIdVector typeIds =
            Safir::Dob::Typesystem::Operations::GetAllTypeIds();

        for (Dob::Typesystem::TypeIdVector::iterator it = typeIds.begin();
             it != typeIds.end(); ++it)
        {
            if (Dob::Typesystem::Operations::IsOfType(*it, Safir::Dob::Entity::ClassTypeId) ||
                Dob::Typesystem::Operations::IsOfType(*it, Safir::Dob::Service::ClassTypeId) ||
                Dob::Typesystem::Operations::IsOfType(*it, Safir::Dob::Message::ClassTypeId) ||
                Dob::Typesystem::Operations::IsOfType(*it, Safir::Dob::Response::ClassTypeId))
            {
                m_QoSTable.insert(QoSTable::value_type(*it,
                                                       QoSData(GetDistributionChannel(*it),
                                                               GetPriority(*it))));
            }
        }
    }
}
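/* Hedged sketch of how the table built in Init() could be consulted later.
 * Hypothetical helper, not from the source; it only assumes that QoSTable is a
 * std::map-like container keyed on TypeId, as the insert above suggests. */
bool TryGetQoS(const QoSTable &table, const Dob::Typesystem::TypeId typeId, QoSData &qos)
{
    QoSTable::const_iterator it = table.find(typeId);
    if (it == table.end())
    {
        return false;   // type was not registered during Init()
    }
    qos = it->second;
    return true;
}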
// Reads the scene file and stores the information as PolygonGroup and Polygon structures
bool ReadSceneFile(void)
{
    FILE *SceneFile;                // File pointer for the scene file
    PolygonGroup *Head = NULL;      // Head of the linked list of polygon groups
    int nPolygons = 0;              // Number of polygons read from the scene file
    Coordinate MaxCoordinates;      // The maximum coordinates observed in the scene

    // Attempt to open the scene file
    if (fopen_s(&SceneFile, SceneFilename, "r") != 0) {
        printf("Could not open scene file '%s'\n", SceneFilename);
        return false;
    } else {
        // Flags used whilst reading scene file data
        bool EndOfFile;
        bool ReadingParameters;
        bool SuccessfulRead;
        bool FirstGroup;

        char *Buffer;                       // Text buffer for reading strings from the scene file
        char *Context = NULL;               // Used for getting tokens from the buffer string
        char *Parameter;                    // Stores the identifier letter at the start of each scene file string
        char *Comment;                      // Used in removing comments from strings
        int LineCount;                      // Used for monitoring the position of errors in the scene file
        Coordinate CoordinateBuffer;        // Stores coordinates read from the buffer
        Coordinate Offset;                  // Stores the current coordinate offset
        PolygonGroup *PolygonGroupBuffer;   // Stores polygon group parameters as they are read from the file
        Polygon *PolygonBuffer;             // Stores polygon parameters as they are read from the file
        Polygon **PolygonListTail;          // Tail of the linked list of polygons

        // Allocate memory for a new polygon group and the coordinate buffer
        PolygonGroupBuffer = NewPolygonGroup(NULL);
        PolygonListTail = &PolygonGroupBuffer->PolygonList;

        // Initiate the buffers
        CoordinateBuffer.X = 0;
        CoordinateBuffer.Y = 0;
        CoordinateBuffer.Z = 0;
        Offset.X = 0;
        Offset.Y = 0;
        Offset.Z = 0;

        // Allocate memory for the buffer
        Buffer = (char*) malloc(200*sizeof(char));

        // Initialise the flags
        EndOfFile = false;
        ReadingParameters = false;
        SuccessfulRead = true;
        FirstGroup = true;

        // Initialise the line count to 0
        LineCount = 0;

        printf("\nReading polygon information from scene file '%s'\n\n", SceneFilename);

        // Read the scene file, one line at a time
        do {
            if (fgets(Buffer, 200, SceneFile) != NULL) {
                // Keep track of the line number
                LineCount++;

                // Search for and remove comments
                Comment = strstr(Buffer, "//");
                if (Comment != NULL) {
                    Comment[0] = '\0';  // Terminate the string here to strip the comment
                }

                // Remove leading whitespace and read the parameter type character (p,t,z,o,h,v)
                Parameter = strtok_s(Buffer, "\t ", &Context);
                if (Parameter != NULL) {
                    switch (Parameter[0]) {
                    // Read the electrical parameters
                    case 'p':
                        // Set the reading parameters flag, create a new polygon group and add the old one to the list
                        ReadingParameters = true;

                        // Add the polygon buffer to the list if this is not the first read
                        if (FirstGroup == true) {
                            FirstGroup = false;
                        } else {
                            AddPolygonGroupToList(&Head, PolygonGroupBuffer);
                            PolygonGroupBuffer = NewPolygonGroup(PolygonGroupBuffer);
                            PolygonListTail = &PolygonGroupBuffer->PolygonList;

                            // FIXME: Should the offset be reset upon reading a 'p'?
                            //Offset.X = 0;
                            //Offset.Y = 0;
                            //Offset.Z = 0;
                        }
                        SuccessfulRead = ReadParameters(&Context, PolygonGroupBuffer);
                        break;

                    // Read the wall thickness
                    case 't':
                        // If not currently reading parameters (i.e. not just read a p, z or t)
                        // create a new polygon group and add the old one to the list
                        if (ReadingParameters == false) {
                            ReadingParameters = true;
                            AddPolygonGroupToList(&Head, PolygonGroupBuffer);
                            PolygonGroupBuffer = NewPolygonGroup(PolygonGroupBuffer);
                            PolygonListTail = &PolygonGroupBuffer->PolygonList;

                            // FIXME: Should the offset be reset upon reading a 't'?
                            //Offset.X = 0;
                            //Offset.Y = 0;
                            //Offset.Z = 0;
                        }
                        SuccessfulRead = ReadThickness(&Context, PolygonGroupBuffer);
                        break;

                    // Read the priority
                    case 'z':
                        // If not currently reading parameters (i.e. not just read a p, z or t)
                        // create a new polygon group and add the old one to the list
                        if (ReadingParameters == false) {
                            ReadingParameters = true;
                            AddPolygonGroupToList(&Head, PolygonGroupBuffer);
                            PolygonGroupBuffer = NewPolygonGroup(PolygonGroupBuffer);
                            PolygonListTail = &PolygonGroupBuffer->PolygonList;

                            // FIXME: Should the offset be reset upon reading a 'z'?
                            //Offset.X = 0;
                            //Offset.Y = 0;
                            //Offset.Z = 0;
                        }
                        SuccessfulRead = ReadPriority(&Context, PolygonGroupBuffer);
                        break;

                    // Read the offset
                    case 'o':
                        SuccessfulRead = ReadCoordinates(&Context, &Offset);
                        break;

                    // Read a vertical polygon
                    case 'v':
                        // Ensure any previous polygons were also vertical
                        if (ReadingParameters == false && PolygonGroupBuffer->Type != Vertical) {
                            AddPolygonGroupToList(&Head, PolygonGroupBuffer);
                            PolygonGroupBuffer = NewPolygonGroup(PolygonGroupBuffer);
                            PolygonListTail = &PolygonGroupBuffer->PolygonList;
                        }
                        ReadingParameters = false;
                        PolygonGroupBuffer->Type = Vertical;

                        // Allocate memory for a new polygon structure with 2 vertices
                        PolygonBuffer = NewPolygon();
                        PolygonBuffer->Vertices = (Coordinate*) malloc(2*sizeof(Coordinate));

                        // Read in 2 sets of coordinates
                        while (ReadCoordinates(&Context, &CoordinateBuffer) == true) {
                            // Increment the number of vertices and ensure only 2 are stored for a vertical polygon
                            if (PolygonBuffer->nVertices < 2) {
                                PolygonBuffer->Vertices[PolygonBuffer->nVertices].X = CoordinateBuffer.X + Offset.X;
                                PolygonBuffer->Vertices[PolygonBuffer->nVertices].Y = CoordinateBuffer.Y + Offset.Y;
                                PolygonBuffer->Vertices[PolygonBuffer->nVertices].Z = CoordinateBuffer.Z + Offset.Z;
                            }
                            PolygonBuffer->nVertices++;
                        }

                        // Ensure both vertices have been read
                        if (PolygonBuffer->nVertices != 2) {
                            SuccessfulRead = false;
                        } else {
                            nPolygons++;
                            *PolygonListTail = PolygonBuffer;
                            PolygonListTail = &(*PolygonListTail)->NextPolygon;
                        }
                        break;

                    // Read a horizontal polygon
                    case 'h':
                        // Ensure any previous polygons were also horizontal
                        if (ReadingParameters == false && PolygonGroupBuffer->Type != Horizontal) {
                            AddPolygonGroupToList(&Head, PolygonGroupBuffer);
                            PolygonGroupBuffer = NewPolygonGroup(PolygonGroupBuffer);
                            PolygonListTail = &PolygonGroupBuffer->PolygonList;
                        }
                        ReadingParameters = false;
                        PolygonGroupBuffer->Type = Horizontal;

                        // Allocate memory for a new polygon structure with room for 50 vertices
                        PolygonBuffer = NewPolygon();
                        PolygonBuffer->Vertices = (Coordinate*) malloc(50*sizeof(Coordinate));

                        // Read in at least 3 sets of coordinates
                        while (ReadCoordinates(&Context, &CoordinateBuffer) == true) {
                            // Store the vertex (with the current offset applied) and increment the vertex count
                            PolygonBuffer->Vertices[PolygonBuffer->nVertices].X = CoordinateBuffer.X + Offset.X;
                            PolygonBuffer->Vertices[PolygonBuffer->nVertices].Y = CoordinateBuffer.Y + Offset.Y;
                            PolygonBuffer->Vertices[PolygonBuffer->nVertices].Z = CoordinateBuffer.Z + Offset.Z;
                            PolygonBuffer->nVertices++;
                        }

                        // Ensure at least 3 vertices have been read
                        if (PolygonBuffer->nVertices < 3) {
                            SuccessfulRead = false;
                        } else {
                            nPolygons++;
                            *PolygonListTail = PolygonBuffer;
                            PolygonListTail = &(*PolygonListTail)->NextPolygon;
                        }

                        // Trim the vertex array down to the number of vertices actually read
                        PolygonBuffer->Vertices = (Coordinate*) realloc(PolygonBuffer->Vertices, PolygonBuffer->nVertices*sizeof(Coordinate));
                        break;
                    }
                }
            } else if (feof(SceneFile) != 0) {
                EndOfFile = true;
            }
        } while (EndOfFile == false && SuccessfulRead == true);

        // Add the final group to the list
        AddPolygonGroupToList(&Head, PolygonGroupBuffer);

        // Free allocated memory
        free(Buffer);
        fclose(SceneFile);

        // Check to see if there were any errors in the scene file
        if (SuccessfulRead == false) {
            printf("Error in scene file, line %d\n", LineCount);

            // Free memory allocated to the polygons
            FreePolygonGroupList(Head);
            return false;
        } else {
            printf("Scene file parsed successfully\nRead %d polygons\n\n", nPolygons);
        }
    }

    // Find the maximum coordinates in the system and allocate enough memory for a rectangle of this size
    MaxCoordinates = FindMaxSize(Head);

    // Print information on the polygons to the display
    if (InputData.DisplayPolygonInformation.Flag == true) {
        PrintPolygonGroupList(Head);
    }

    // Allocate memory for the TLM grid
    AllocateGridMemory(MaxCoordinates);

    // Add the polygons into the grid
    AddPolygonsToGrid(Head);

    // Free memory allocated to the polygons
    FreePolygonGroupList(Head);

    // Calculate the reflection and transmission coefficients based on their impedances
    CalculateReflectionTransmissionCoefficients();

    return true;
}
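/* Hedged sketch of a scene file the parser above would accept.  The line
 * prefixes come straight from the switch statement ('p' electrical parameters,
 * 't' thickness, 'z' priority, 'o' offset, 'v' vertical polygon with exactly
 * two vertices, 'h' horizontal polygon with three or more); the values shown
 * after 'p', 't' and 'z' are placeholders, since ReadParameters(),
 * ReadThickness() and ReadPriority() are not shown here:
 *
 *   // simple room: one horizontal floor and one vertical wall
 *   p 1.0 0.0                              // electrical parameters (format assumed)
 *   t 0.1                                  // wall thickness (format assumed)
 *   z 1                                    // priority (format assumed)
 *   o 0 0 0                                // offset applied to the vertices below
 *   h 0 0 0   10 0 0   10 10 0   0 10 0   // floor, four vertices
 *   v 0 0 0   0 10 3                      // wall, two vertices
 */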
InputWeightFF::InputWeightFF(const std::string &line) : StatelessFeatureFunction(1, line) { ReadParameters(); }
static void ProcessNMEA(void)          // process a valid NMEA that got to the console
{
#ifdef WITH_CONFIG
  if (NMEA.isPOGNS()) ReadParameters();
#endif
}
PhrasePenalty::PhrasePenalty(const std::string &line) : StatelessFeatureFunction("PhrasePenalty",1, line) { ReadParameters(); }
int main(int argc, char **argv)
{
    if (argc == 1) PrintHelp();
    scaledCost = 0;
    time_t startTime;
    srand(time(&startTime));
    outputFilename = (char *)"output.txt";
    functionMapping = (char *)"UABCDEFGHIJKLMNOPQRSTVWXYZ"; //"UEGMPTBFOARDC";
    sizeCutoff = 1;
    densityCutoff = 0;
    pCutoff = 1.000001;
    ReadParameters(argc, argv);

    printf("Graph filename: %s\n", graphFilename);
    printf("Number of nodes: %d\n\n", numVerts = ReadClusteringNumber(graphFilename));
    if (numVerts == -1) {
        fprintf(stderr, "Graph input error. File %s is nonexistent, unreadable, or not a valid graph file. QUITTING.\n", graphFilename);
        printUsageError();
        return 0;
    }
    printf("Clustering filename: %s\n", clusteringFilename);
    printf("Number of clusters: %d\n\n", numClust = ReadClusteringNumber(clusteringFilename));
    if (numClust == -1) {
        fprintf(stderr, "Clustering input error. File %s is nonexistent, unreadable, or not a valid clustering file. QUITTING.\n", clusteringFilename);
        printUsageError();
        return 0;
    }
    //printf("Number of complexes: %d\n", numberOfComplexes = ReadClusteringNumber(complexFilename));
    printf("Protein name filename: %s\n\n", nameFilename);
    NumFunctions = strlen(functionMapping);
    printf("%-3dfunctional groups: %s\n\n", NumFunctions, functionMapping);
    printf("Prediction cutoffs:\n Size >= %d.\n Density >= %-1.3f.\n P-value <= %-.1e.\n\n",
           sizeCutoff, densityCutoff, pCutoff);

    /* FIX COMPUTATION OF BEST P-VALUE.
     * WRITE THE REST OF THE CHAPTER. */

    //initialize the arrays.
    ComplexList = new SLList[numberOfComplexes];
    proteinName = new std::string[numVerts];
    Function = new char[numVerts];
    FunctionNum = new int[numVerts];
    FunctionSize = new int[NumFunctions];
    GetProteinNames(nameFilename);
    whichCluster = new int[numVerts];
    AlreadyCounted = new bool[numVerts];
    ComplexSize = new int[numberOfComplexes];
    numberMatched = new int[numberOfComplexes];
    numClust = CountClusters(clusteringFilename);
    ClusterSize = new int[numClust];
    ClusterDensity = new float[numClust];
    ComplexDensity = new float[numberOfComplexes];
    ClusterP = new double[numClust];
    ComplexP = new double[numberOfComplexes];
    ComplexPee = new double[numberOfComplexes];
    ClusterFunction = new int[numClust];
    ComplexFunction = new int[numberOfComplexes];
    ClusterMatched = new bool[numClust];
    ComplexMatched = new bool[numberOfComplexes];
    ClusterSMatched = new bool[numClust];
    ComplexSMatched = new bool[numberOfComplexes];
    MainInComplex = new int[numberOfComplexes];
    MainInCluster = new int[numClust];
    UnknownInComplex = new int[numberOfComplexes];
    UnknownInCluster = new int[numClust];
    ComplexList = new SLList[numberOfComplexes];
    MaxMatch = new int[numClust];
    MaxMatchPct = new float[numClust];
    MaxMatchWith = new int[numClust];
    MaxMatchPctWith = new int[numClust];
    for (int clust = 1; clust < numClust; clust++) {
        MaxMatch[clust] = 0;
        MaxMatchPct[clust] = 0;
        MaxMatchWith[clust] = 0;
        MaxMatchPctWith[clust] = 0;
    }

    ReadClustering(clusteringFilename);
    graph.SetNumClust(numClust);
    //printf("%d clusters....\n", (int) graph.NUM_CLUST);
    graph.MakeGraph(graphFilename);
    graph.CheckAdjList();
    graph.FillAdjList();
    graph.InitClustering(whichCluster);
    //graph.SetInitialNumAndDom();

    //Matching criteria
    //ComplexCutoff = .7;
    ClusterCutoff = .7;
    //ComConCutoff = 1.1;
    CluConCutoff = .9;

    //ReadComplexes(complexFilename);
    // GenVertexPermutation(graph.Order);
    //GetComplexP();
    GetClusterP();
    // Match();
    GetStats();
    //WriteData(outputFilename);
    printf("\n");
    WriteVerbalData2(outputFilename);
    //printf("PARAMS: %d %f %f\n", sizeCutoff, densityCutoff, pCutoff);
    //WriteComposition(outputFilename);
    /*
    for (int clust = 0; clust < numClust; clust++) {
        if (!ClusterMatched[clust]) {
            printf("Cluster %d unmatched. Tops are %d/%d with %d (size %d) and %f with %d (size %d).\n",
                   clust, MaxMatch[clust], (int) graph.ClusterSize[clust], MaxMatchWith[clust],
                   ComplexSize[MaxMatchWith[clust]], MaxMatchPct[clust], MaxMatchPctWith[clust],
                   ComplexSize[MaxMatchPctWith[clust]]);
        }
    }
    */
    return 0;
}
robKinematics::Errno robKinematics::Read( std::istream& is ){ ReadParameters( is ); return robJoint::Read( is ); }
int main(int argc, char *argv[])
{
    GainType Cost;
    double Time, LastTime = GetTime();

    /* Read the specification of the problem */
    if (argc >= 2)
        ParameterFileName = argv[1];
    ReadParameters();
    MaxMatrixDimension = 10000;
    ReadProblem();

    if (SubproblemSize > 0) {
        if (DelaunayPartitioning)
            SolveDelaunaySubproblems();
        else if (KarpPartitioning)
            SolveKarpSubproblems();
        else if (KCenterPartitioning)
            SolveKCenterSubproblems();
        else if (KMeansPartitioning)
            SolveKMeansSubproblems();
        else if (RohePartitioning)
            SolveRoheSubproblems();
        else if (MoorePartitioning || SierpinskiPartitioning)
            SolveSFCSubproblems();
        else
            SolveTourSegmentSubproblems();
        return EXIT_SUCCESS;
    }
    AllocateStructures();
    CreateCandidateSet();
    InitializeStatistics();

    if (Norm != 0)
        BestCost = PLUS_INFINITY;
    else {
        /* The ascent has solved the problem! */
        Optimum = BestCost = (GainType) LowerBound;
        UpdateStatistics(Optimum, GetTime() - LastTime);
        RecordBetterTour();
        RecordBestTour();
        WriteTour(OutputTourFileName, BestTour, BestCost);
        WriteTour(TourFileName, BestTour, BestCost);
        Runs = 0;
    }

    /* Find a specified number (Runs) of local optima */
    for (Run = 1; Run <= Runs; Run++) {
        LastTime = GetTime();
        Cost = FindTour();      /* using the Lin-Kernighan heuristic */
        if (MaxPopulationSize > 1) {
            /* Genetic algorithm */
            int i;
            for (i = 0; i < PopulationSize; i++) {
                GainType OldCost = Cost;
                Cost = MergeTourWithIndividual(i);
                if (TraceLevel >= 1 && Cost < OldCost) {
                    printff("  Merged with %d: Cost = " GainFormat, i + 1, Cost);
                    if (Optimum != MINUS_INFINITY && Optimum != 0)
                        printff(", Gap = %0.4f%%",
                                100.0 * (Cost - Optimum) / Optimum);
                    printff("\n");
                }
            }
            if (!HasFitness(Cost)) {
                if (PopulationSize < MaxPopulationSize) {
                    AddToPopulation(Cost);
                    if (TraceLevel >= 1)
                        PrintPopulation();
                } else if (Cost < Fitness[PopulationSize - 1]) {
                    i = ReplacementIndividual(Cost);
                    ReplaceIndividualWithTour(i, Cost);
                    if (TraceLevel >= 1)
                        PrintPopulation();
                }
            }
        } else if (Run > 1)
            Cost = MergeBetterTourWithBestTour();
        if (Cost < BestCost) {
            BestCost = Cost;
            RecordBetterTour();
            RecordBestTour();
            WriteTour(OutputTourFileName, BestTour, BestCost);
            WriteTour(TourFileName, BestTour, BestCost);
        }
        if (Cost < Optimum) {
            if (FirstNode->InputSuc) {
                Node *N = FirstNode;
                while ((N = N->InputSuc = N->Suc) != FirstNode);
            }
            Optimum = Cost;
            printff("*** New optimum = " GainFormat " ***\n\n", Optimum);
        }
        Time = fabs(GetTime() - LastTime);
        UpdateStatistics(Cost, Time);
        if (TraceLevel >= 1 && Cost != PLUS_INFINITY) {
            printff("Run %d: Cost = " GainFormat, Run, Cost);
            if (Optimum != MINUS_INFINITY && Optimum != 0)
                printff(", Gap = %0.4f%%",
                        100.0 * (Cost - Optimum) / Optimum);
            printff(", Time = %0.2f sec. %s\n\n", Time,
                    Cost < Optimum ? "<" : Cost == Optimum ? "=" : "");
        }
        if (PopulationSize >= 2 &&
            (PopulationSize == MaxPopulationSize || Run >= 2 * MaxPopulationSize) &&
            Run < Runs) {
            Node *N;
            int Parent1, Parent2;
            Parent1 = LinearSelection(PopulationSize, 1.25);
            do
                Parent2 = LinearSelection(PopulationSize, 1.25);
            while (Parent2 == Parent1);
            ApplyCrossover(Parent1, Parent2);
            N = FirstNode;
            do {
                int d = C(N, N->Suc);
                AddCandidate(N, N->Suc, d, INT_MAX);
                AddCandidate(N->Suc, N, d, INT_MAX);
                N = N->InitialSuc = N->Suc;
            } while (N != FirstNode);
        }
        SRandom(++Seed);
    }
    PrintStatistics();
    return EXIT_SUCCESS;
}
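/* ReadParameters() above reads the key = value parameter file named on the
 * command line (argv[1]).  A minimal sketch of such a file in the usual LKH
 * style; the keys shown are the common ones, and which keys this particular
 * build accepts is determined by its ReadParameters() implementation:
 *
 *   PROBLEM_FILE = berlin52.tsp
 *   OUTPUT_TOUR_FILE = berlin52.tour
 *   RUNS = 10
 *   SEED = 1
 *   TRACE_LEVEL = 1
 */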
InMemoryPerSentenceOnDemandLM::InMemoryPerSentenceOnDemandLM(const std::string &line) : LanguageModel(line) { ReadParameters(); }
SkeletonPT::SkeletonPT(const std::string &line) : PhraseDictionary("SkeletonPT", line) { ReadParameters(); }
OpSequenceModel::OpSequenceModel(const std::string &line) :StatefulFeatureFunction("OpSequenceModel", 5, line ) { ReadParameters(); }
SetSourcePhrase::SetSourcePhrase(const std::string &line) :StatelessFeatureFunction(1, line) { m_tuneable = false; ReadParameters(); }
InputFeature::InputFeature(const std::string &line) :StatelessFeatureFunction("InputFeature", line) { ReadParameters(); }
int main(int argc, char *argv[])
{
    float *collide_field = NULL, *stream_field = NULL,
          *collide_field_d = NULL, *stream_field_d = NULL, *swap = NULL,
          tau, wall_velocity[D_LBM], num_cells, mlups_sum;
    int *flag_field = NULL, *flag_field_d = NULL,
        xlength, t, timesteps, timesteps_per_plotting, gpu_enabled;
    clock_t mlups_time;
    size_t field_size;

    /* process parameters */
    ReadParameters(&xlength, &tau, wall_velocity, &timesteps,
                   &timesteps_per_plotting, argc, argv, &gpu_enabled);

    /* check if provided parameters are legitimate */
    ValidateModel(wall_velocity, xlength, tau);

    /* initializing fields */
    num_cells = pow(xlength + 2, D_LBM);
    field_size = Q_LBM*num_cells*sizeof(float);
    collide_field = (float*) malloc(field_size);
    stream_field = (float*) malloc(field_size);
    flag_field = (int*) malloc(num_cells*sizeof(int));
    InitialiseFields(collide_field, stream_field, flag_field, xlength, gpu_enabled);
    InitialiseDeviceFields(collide_field, stream_field, flag_field, xlength,
                           &collide_field_d, &stream_field_d, &flag_field_d);

    for (t = 0; t < timesteps; t++) {
        printf("Time step: #%d\n", t);
        if (gpu_enabled) {
            DoIteration(collide_field, stream_field, flag_field, tau, wall_velocity,
                        xlength, &collide_field_d, &stream_field_d, &flag_field_d,
                        &mlups_sum);
            /* Copy data from device memory only when we need VTK output */
            if (!(t%timesteps_per_plotting))
                CopyFieldsFromDevice(collide_field, stream_field, xlength,
                                     &collide_field_d, &stream_field_d);
        } else {
            mlups_time = clock();
            /* Copy pdfs from neighbouring cells into collide field */
            DoStreaming(collide_field, stream_field, flag_field, xlength);
            /* Perform the swapping of collide and stream fields */
            swap = collide_field;
            collide_field = stream_field;
            stream_field = swap;
            /* Compute post collision distributions */
            DoCollision(collide_field, flag_field, tau, xlength);
            /* Treat boundaries */
            TreatBoundary(collide_field, flag_field, wall_velocity, xlength);
            mlups_time = clock() - mlups_time;
            /* Print out the MLUPS value */
            mlups_sum += num_cells/(MLUPS_EXPONENT*(float)mlups_time/CLOCKS_PER_SEC);
            if (VERBOSE)
                printf("MLUPS: %f\n",
                       num_cells/(MLUPS_EXPONENT*(float)mlups_time/CLOCKS_PER_SEC));
        }
        /* Print out vtk output if needed */
        if (!(t%timesteps_per_plotting))
            WriteVtkOutput(collide_field, flag_field, "img/lbm-img", t, xlength);
    }
    printf("Average MLUPS: %f\n", mlups_sum/(t+1));

    if (VERBOSE) {
        if (gpu_enabled)
            CopyFieldsFromDevice(collide_field, stream_field, xlength,
                                 &collide_field_d, &stream_field_d);
        WriteField(collide_field, "img/collide-field", 0, xlength, gpu_enabled);
        writeFlagField(flag_field, "img/flag-field", xlength, gpu_enabled);
    }

    /* Free memory */
    free(collide_field);
    free(stream_field);
    free(flag_field);
    FreeDeviceFields(&collide_field_d, &stream_field_d, &flag_field_d);

    printf("Simulation complete.\n");
    return 0;
}
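/* Hedged sketch of the storage layout implied by the allocation above (the
 * traversal order is an assumption; the real convention lives in DoStreaming()
 * and DoCollision()): each of the (xlength+2)^D_LBM cells stores its Q_LBM
 * distribution values contiguously, so for D_LBM == 3 the i-th distribution of
 * cell (x,y,z) would be addressed roughly like this: */
static inline int PdfIndexSketch(int x, int y, int z, int i, int xlength)
{
    int n = xlength + 2;            /* lattice width including the boundary layer */
    int cell = z*n*n + y*n + x;     /* linearised cell index                      */
    return Q_LBM*cell + i;          /* i-th distribution value of that cell       */
}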
SoftMatchingFeature::SoftMatchingFeature(const std::string &line) : StatelessFeatureFunction(0, line) { ReadParameters(); }
signed function (struct plc * plc, char const * socket)
{
	struct channel * channel = (struct channel *)(plc->channel);
	struct message * message = (struct message *)(plc->message);

#ifndef __GNUC__
#pragma pack (push,1)
#endif

	struct __packed vs_host_action_ind {
		struct ethernet_hdr ethernet;
		struct qualcomm_hdr qualcomm;
		uint8_t MACTION;
		uint8_t MAJOR_VERSION;
		uint8_t MINOR_VERSION;
	} * indicate = (struct vs_host_action_ind *) (message);

#ifndef __GNUC__
#pragma pack (pop)
#endif

	byte buffer [3000];
	struct plctopology * plctopology = (struct plctopology *)(buffer);
	signed fd = opensocket (socket);
	char const * FactoryNVM = plc->NVM.name;
	char const * FactoryPIB = plc->PIB.name;
	signed action;
	signed status;
	memset (buffer, 0, sizeof (buffer));
	write (fd, MESSAGE, strlen (MESSAGE));
	while (!done) {
		status = ReadMME (plc, 0, (VS_HOST_ACTION | MMTYPE_IND));
		if (status < 0) {
			break;
		}
		if (status < 1) {
			PLCTopology (channel, message, plctopology);
			PLCTopologyPrint (plctopology);
			continue;
		}
		action = indicate->MACTION;
		memcpy (channel->peer, indicate->ethernet.OSA, sizeof (channel->peer));
		if (HostActionResponse (plc)) {
			return (-1);
		}
		if (action == 0x00) {
			if (BootDevice2 (plc)) {
				return (-1);
			}
			if (_anyset (plc->flags, PLC_FLASH_DEVICE)) {
				FlashDevice2 (plc);
			}
			continue;
		}
		if (action == 0x01) {
			close (plc->NVM.file);
			if (ReadFirmware1 (plc)) {
				return (-1);
			}
			if ((plc->NVM.file = open (plc->NVM.name = plc->nvm.name, O_BINARY|O_RDONLY)) == -1) {
				error (1, errno, "%s", plc->NVM.name);
			}
			if (ResetDevice (plc)) {
				return (-1);
			}
			continue;
		}
		if (action == 0x02) {
			close (plc->PIB.file);
			if (ReadParameters (plc)) {
				return (-1);
			}
			if ((plc->PIB.file = open (plc->PIB.name = plc->pib.name, O_BINARY|O_RDONLY)) == -1) {
				error (1, errno, "%s", plc->PIB.name);
			}
			if (ResetDevice (plc)) {
				return (-1);
			}
			continue;
		}
		if (action == 0x03) {
			close (plc->PIB.file);
			if (ReadParameters (plc)) {
				return (-1);
			}
			if ((plc->PIB.file = open (plc->PIB.name = plc->pib.name, O_BINARY|O_RDONLY)) == -1) {
				error (1, errno, "%s", plc->PIB.name);
			}
			close (plc->NVM.file);
			if (ReadFirmware1 (plc)) {
				return (-1);
			}
			if ((plc->NVM.file = open (plc->NVM.name = plc->nvm.name, O_BINARY|O_RDONLY)) == -1) {
				error (1, errno, "%s", plc->NVM.name);
			}
			if (ResetDevice (plc)) {
				return (-1);
			}
			continue;
		}
		if (action == 0x04) {
			if (InitDevice (plc)) {
				return (-1);
			}
			continue;
		}
		if (action == 0x05) {
			close (plc->NVM.file);
			if ((plc->NVM.file = open (plc->NVM.name = FactoryNVM, O_BINARY|O_RDONLY)) == -1) {
				error (1, errno, "%s", plc->NVM.name);
			}
			close (plc->PIB.file);
			if ((plc->PIB.file = open (plc->PIB.name = FactoryPIB, O_BINARY|O_RDONLY)) == -1) {
				error (1, errno, "%s", plc->PIB.name);
			}
			if (ResetDevice (plc)) {
				return (-1);
			}
			continue;
		}
		if (action == 0x06) {
			close (plc->PIB.file);
			if (ReadParameters (plc)) {
				return (-1);
			}
			if ((plc->PIB.file = open (plc->PIB.name = plc->pib.name, O_BINARY|O_RDONLY)) == -1) {
				error (1, errno, "%s", plc->PIB.name);
			}
			continue;
		}
		error (0, ENOSYS, "Host Action 0x%02X", action);
	}
	close (fd);
	return (0);
}
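/* The action codes handled above, collected into one place.  The enumerator
 * names are hypothetical; only the numeric values and their effects are taken
 * from the dispatch code: */
enum host_action_sketch {
	HOST_ACTION_REBOOT          = 0x00,  /* boot (and optionally flash) the device */
	HOST_ACTION_READ_FIRMWARE   = 0x01,  /* re-read firmware, then reset           */
	HOST_ACTION_READ_PARAMETERS = 0x02,  /* re-read PIB parameters, then reset     */
	HOST_ACTION_READ_BOTH       = 0x03,  /* re-read PIB and firmware, then reset   */
	HOST_ACTION_INIT_DEVICE     = 0x04,  /* initialise the device                  */
	HOST_ACTION_FACTORY_RESET   = 0x05,  /* reopen factory NVM/PIB, then reset     */
	HOST_ACTION_READ_PIB_ONLY   = 0x06   /* re-read PIB without a reset            */
};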
int tsp_lkh()
{
    GainType Cost, OldOptimum;
    double Time, LastTime = GetTime();

    /* Read the specification of the problem */
    ReadParameters();
    MaxMatrixDimension = 10000;
    init();
    AllocateStructures();
    CreateCandidateSet();
    InitializeStatistics();
    BestCost = PLUS_INFINITY;

    for (Run = 1; Run <= Runs; Run++) {
        LastTime = GetTime();
        Cost = FindTour();      /* using the Lin-Kernighan heuristic */
        if (MaxPopulationSize > 1) {
            /* Genetic algorithm */
            int i;
            for (i = 0; i < PopulationSize; i++) {
                GainType OldCost = Cost;
                Cost = MergeTourWithIndividual(i);
                if (TraceLevel >= 1 && Cost < OldCost) {
                    // printff("  Merged with %d: Cost = " GainFormat, i + 1, Cost);
                    // if (Optimum != MINUS_INFINITY && Optimum != 0)
                    //     printff(", Gap = %0.4f%%", 100.0 * (Cost - Optimum) / Optimum);
                    // printff("\n");
                }
            }
            if (!HasFitness(Cost)) {
                if (PopulationSize < MaxPopulationSize) {
                    AddToPopulation(Cost);
                    if (TraceLevel >= 1)
                        PrintPopulation();
                } else if (Cost < Fitness[PopulationSize - 1]) {
                    i = ReplacementIndividual(Cost);
                    ReplaceIndividualWithTour(i, Cost);
                    if (TraceLevel >= 1)
                        PrintPopulation();
                }
            }
        } else if (Run > 1)
            Cost = MergeTourWithBestTour();
        if (Cost < BestCost) {
            BestCost = Cost;
            RecordBetterTour();
            RecordBestTour();
        }
        OldOptimum = Optimum;
        if (Cost < Optimum) {
            if (FirstNode->InputSuc) {
                Node *N = FirstNode;
                while ((N = N->InputSuc = N->Suc) != FirstNode);
            }
            Optimum = Cost;
            //printff("*** New optimum = " GainFormat " ***\n\n", Optimum);
        }
        Time = fabs(GetTime() - LastTime);
        UpdateStatistics(Cost, Time);
        /*
        if (TraceLevel >= 1 && Cost != PLUS_INFINITY) {
            printff("Run %d: Cost = " GainFormat, Run, Cost);
            if (Optimum != MINUS_INFINITY && Optimum != 0)
                printff(", Gap = %0.4f%%", 100.0 * (Cost - Optimum) / Optimum);
            printff(", Time = %0.2f sec. %s\n\n", Time,
                    Cost < Optimum ? "<" : Cost == Optimum ? "=" : "");
        }
        */
        if (StopAtOptimum && Cost == OldOptimum && MaxPopulationSize >= 1) {
            Runs = Run;
            break;
        }
        if (PopulationSize >= 2 &&
            (PopulationSize == MaxPopulationSize || Run >= 2 * MaxPopulationSize) &&
            Run < Runs) {
            Node *N;
            int Parent1, Parent2;
            Parent1 = LinearSelection(PopulationSize, 1.25);
            do
                Parent2 = LinearSelection(PopulationSize, 1.25);
            while (Parent2 == Parent1);
            ApplyCrossover(Parent1, Parent2);
            N = FirstNode;
            do {
                if (ProblemType != HCP && ProblemType != HPP) {
                    int d = C(N, N->Suc);
                    AddCandidate(N, N->Suc, d, INT_MAX);
                    AddCandidate(N->Suc, N, d, INT_MAX);
                }
                N = N->InitialSuc = N->Suc;
            } while (N != FirstNode);
        }
        SRandom(++Seed);
    }
    // PrintStatistics();

    /* Copy the best tour into the caller-visible result buffer (0-based node ids) */
    for (int i = 0; i < TSP_N; i++) {
        TSP_RESULT[i] = BestTour[i] - 1;
        // printf("%d --> ", BestTour[i] - 1);
    }
    return BestCost;
}
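/* Hedged usage sketch for the wrapper above (hypothetical caller; it assumes
 * only what is visible here: ReadParameters() and init() load the instance,
 * TSP_N holds the node count, and TSP_RESULT receives the 0-based tour): */
void solve_and_print(void)
{
    GainType best = tsp_lkh();
    printf("best tour length = " GainFormat "\n", best);
    for (int i = 0; i < TSP_N; i++)
        printf("%d ", TSP_RESULT[i]);
    printf("\n");
}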
DistortionScoreProducer::DistortionScoreProducer(const std::string &line) : StatefulFeatureFunction(1, line) { s_staticColl.push_back(this); ReadParameters(); }
PhraseDictionaryOnDisk::PhraseDictionaryOnDisk(const std::string &line) : MyBase(line) { ReadParameters(); }
SyntaxRHS::SyntaxRHS(const std::string &line) :StatelessFeatureFunction(1, line) { ReadParameters(); }
WordPenaltyProducer::WordPenaltyProducer(const std::string &line) : StatelessFeatureFunction(1, line) { ReadParameters(); }
SkeletonStatelessFF::SkeletonStatelessFF(const std::string &line) :StatelessFeatureFunction(2, line) { ReadParameters(); }
robKinematics::Errno robKinematics::Read(const Json::Value &config) { ReadParameters(config); return robJoint::Read(config); }
PhrasePenalty::PhrasePenalty(const std::string &line) : StatelessFeatureFunction(1, line) , m_perPhraseTable(false) { ReadParameters(); }
SkeletonChangeInput::SkeletonChangeInput(const std::string &line) :StatelessFeatureFunction(2, line) { ReadParameters(); }
UnknownWordPenaltyProducer::UnknownWordPenaltyProducer(const std::string &line) : StatelessFeatureFunction(1, line) { m_tuneable = false; ReadParameters(); }
PhraseBoundaryFeature::PhraseBoundaryFeature(const std::string &line) : StatefulFeatureFunction(0, line) { std::cerr << "Initializing phrase boundary feature.." << std::endl; ReadParameters(); }
DeleteRules::DeleteRules(const std::string &line) :StatelessFeatureFunction(1, line) { m_tuneable = false; ReadParameters(); }
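/* All of the Moses-style feature functions above follow the same pattern: the
 * constructor forwards the "key=value ..." configuration line to the base class
 * and then calls ReadParameters().  A hedged sketch of that pattern (the member
 * name m_args and the SetParameter hook follow Moses conventions but are shown
 * here as an illustration, not copied from this file): */
void FeatureFunction::ReadParameters()
{
  while (!m_args.empty()) {
    const std::vector<std::string> &args = m_args.front();
    SetParameter(args[0], args[1]);     // each entry is a (key, value) pair from the config line
    m_args.erase(m_args.begin());
  }
}

void SkeletonStatelessFF::SetParameter(const std::string &key, const std::string &value)
{
  if (key == "arg") {
    // handle a feature-specific option here
  } else {
    StatelessFeatureFunction::SetParameter(key, value);   // defer unknown keys to the base class
  }
}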