// default constructor CNeuralNet::CNeuralNet(int numInputs, int numOutputs, int numHiddenLayers, int numUnitsPerHiddenLayer) : m_numInputs(numInputs), m_numOutputs(numOutputs), m_numHiddenLayers(numHiddenLayers), m_numLayers(numHiddenLayers+2), m_numUnitsPerHiddenLayer(numUnitsPerHiddenLayer) { // set unit counts data m_numUnitsPerLayer.Add(m_numInputs); for(int i=0;i<m_numHiddenLayers;i++) m_numUnitsPerLayer.Add(m_numUnitsPerHiddenLayer); m_numUnitsPerLayer.Add(m_numOutputs); // and alloc memory GenerateNetwork(); // init settings for new network m_fAlpha = tfDefaultAlpha; m_fEta = tfDefaultEta; m_fGain = tfDefaultGain; // m_nFileMajorVersion = m_nMajorVersion; m_nFileMinorVersion = m_nMinorVersion; m_numTrainingCycles = 0; }
// Train the network for MAXCT epochs of 10 training passes each, logging an
// evaluation to the report file `f` every 10 epochs, then report the final
// state and release resources.
// Returns 0 on normal completion ("void main" is non-standard C/C++).
int main(void)
{
    NET Net;
    int ct = 0;

    InitializeRandoms();
    GenerateNetwork(&Net);
    RandomWeights(&Net);
    InitializeApplication(&Net);

    do {
        TrainNet(&Net, 10);
        // Periodic progress report every 10th epoch (including epoch 0).
        if (ct % 10 == 0) {
            fprintf(f, "Epoch = %d\n", ct);
            EvaluateNet(&Net);
        }
        ct++;
    } while (ct < MAXCT);

    fprintf(f, "\n");
    fprintf(f, "Final Epoch:\n");  // fixed typo: was "Finial Epoch:"
    EvaluateNet(&Net);

    FinalizeApplication(&Net);
    return 0;
}
void main() { NET Net; BOOL Stop; REAL MinTestError; InitializeRandoms(); GenerateNetwork(&Net); RandomWeights(&Net); InitializeApplication(&Net); Stop = FALSE; MinTestError = MAX_REAL; do { TrainNet(&Net, 10); TestNet(&Net); if (TestError < MinTestError) { fprintf(f, " - saving Weights ..."); MinTestError = TestError; SaveWeights(&Net); } else if (TestError > 1.2 * MinTestError) { fprintf(f, " - stopping Training and restoring Weights ..."); Stop = TRUE; RestoreWeights(&Net); } } while (NOT Stop); TestNet(&Net); EvaluateNet(&Net); FinalizeApplication(&Net); }
// Build a randomly initialized network, train it once, write the resulting
// network out, and release application resources.
// Returns 0 on normal completion ("void main" is non-standard C/C++).
int main(void)
{
    NET Net;

    InitializeRandoms();
    GenerateNetwork(&Net);
    RandomWeights(&Net);
    InitializeApplication(&Net);

    TrainNet(&Net);
    WriteNet(&Net);

    FinalizeApplication(&Net);
    return 0;
}
// Serialize the network from an MFC archive.  On load: reads the header
// (versions, training-cycle count, topology, training parameters), rebuilds
// the layer structure via GenerateNetwork(), then reads every weight matrix.
// NOTE(review): only the loading branch is present here; the storing
// (ar.IsStoring()) branch is not visible in this chunk -- presumably it was
// lost when the source was collapsed, or lives elsewhere. TODO confirm.
void CNeuralNet::Serialize(CArchive& ar)
{
    if (ar.IsLoading()) {
        // --- load net header ---
        ar >> m_nFileMajorVersion;
        ar >> m_nFileMinorVersion;
        ar >> m_numTrainingCycles;
        ar >> m_numInputs;
        ar >> m_numOutputs;
        ar >> m_numLayers;
        ar >> m_numHiddenLayers;
        ar >> m_numUnitsPerHiddenLayer;
        ar >> m_fAlpha;
        ar >> m_fEta;
        ar >> m_fGain;

        // --- rebuild the per-layer unit counts from the header ---
        m_numUnitsPerLayer.Add(m_numInputs);
        for (int i = 0; i < m_numHiddenLayers; i++)
            m_numUnitsPerLayer.Add(m_numUnitsPerHiddenLayer);
        m_numUnitsPerLayer.Add(m_numOutputs);

        // Create the net structure; must not already be allocated.
        ASSERT(m_pLayers[0] == NULL);
        GenerateNetwork();

        // --- load the weights ---
        // Layer 0 is the input layer and carries no incoming weights.
        for (int nLayer = 1; nLayer < m_numLayers; nLayer++) {
            // Units are 1-based in this representation.
            for (int i = 1; i <= m_numUnitsPerLayer[nLayer]; i++) {
                // +1 connection for the bias unit of the previous layer.
                for (int j = 0; j < m_numUnitsPerLayer[nLayer - 1] + 1; j++) {
                    ar >> m_pLayers[nLayer]->ppfWeight[i][j];
                    ar >> m_pLayers[nLayer]->ppfWeightSave[i][j];
                    ar >> m_pLayers[nLayer]->ppfDeltaWeight[i][j];
                }
            }
        }
    }
}  // NOTE(review): closing brace restored -- the collapsed source had 5
   // opening braces but only 4 closing ones.
void main() { NET Net; REAL Error; BOOL Stop; INT n,m; InitializeRandoms(); GenerateNetwork(&Net); RandomWeights(&Net); InitializeApplication(&Net); do { Error = 0; Stop = TRUE; for (n=0; n<NUM_DATA; n++) { SimulateNet(&Net, Input[n], Output[n], FALSE, FALSE); Error = MAX(Error, Net.Error); Stop = Stop AND (Net.Error < Net.Epsilon); } Error = MAX(Error, Net.Epsilon); printf("Training %0.0f%% completed ...\n", (Net.Epsilon / Error) * 100); if (NOT Stop) { for (m=0; m<10*NUM_DATA; m++) { n = RandomEqualINT(0, NUM_DATA-1); SimulateNet(&Net, Input[n], Output[n], TRUE, FALSE); } } } while (NOT Stop); for (n=0; n<NUM_DATA; n++) { SimulateNet(&Net, Input[n], Output[n], FALSE, TRUE); } FinalizeApplication(&Net); }