/* step 1 */
void FFNet_propagate (FFNet me, const double input[], double output[]) {
	double act;
	long i, j, k = 1;
	long nNodes = my outputsAreLinear ? my nNodes - my nOutputs : my nNodes;
	/* clamp input pattern on the network */
	for (i = 1; i <= my nUnitsInLayer[0]; i++)
		my activity[i] = input[i];
	/* on hidden units use activation function */
	for (i = my nUnitsInLayer[0] + 2; i <= nNodes; i++) {
		if (my isbias[i]) continue;
		for (act = 0.0, j = my nodeFirst[i]; j <= my nodeLast[i]; j++, k++)
			act += my w[k] * my activity[j];
		my activity[i] = my nonLinearity (me, act, & my deriv[i]);
	}
	/* on output units use another activation function */
	if (my outputsAreLinear) {
		for (i = nNodes + 1; i <= my nNodes; i++) {
			if (my isbias[i]) continue;
			for (act = 0.0, j = my nodeFirst[i]; j <= my nodeLast[i]; j++, k++)
				act += my w[k] * my activity[j];
			my activity[i] = act;
			my deriv[i] = 1.0;
		}
	}
	if (output)
		for (k = my nNodes - my nOutputs + 1, i = 1; i <= my nOutputs; i++, k++)
			output[i] = my activity[k];
}
/* step 1 */
void FFNet_propagate (FFNet me, const double input[], double output[]) {
	// clamp input pattern on the network
	for (long i = 1; i <= my nUnitsInLayer[0]; i++) {
		my activity[i] = input[i];
	}
	// on hidden units use activation function;
	// k runs sequentially over the weight vector, one entry per incoming connection
	long k = 1, nNodes = my outputsAreLinear ? my nNodes - my nOutputs : my nNodes;
	for (long i = my nUnitsInLayer[0] + 2; i <= nNodes; i++) {
		if (my isbias[i]) {
			continue;
		}
		double act = 0.0;
		for (long j = my nodeFirst[i]; j <= my nodeLast[i]; j++, k++) {
			act += my w[k] * my activity[j];
		}
		my activity[i] = my nonLinearity (me, act, & my deriv[i]);
	}
	// on output units use another activation function
	if (my outputsAreLinear) {
		for (long i = nNodes + 1; i <= my nNodes; i++) {
			if (my isbias[i]) {
				continue;
			}
			double act = 0.0;
			for (long j = my nodeFirst[i]; j <= my nodeLast[i]; j++, k++) {
				act += my w[k] * my activity[j];
			}
			my activity[i] = act;
			my deriv[i] = 1.0;
		}
	}
	// copy the activities of the output units into the output array, if supplied
	k = my nNodes - my nOutputs + 1;
	if (output) {
		for (long i = 1; i <= my nOutputs; i++, k++) {
			output[i] = my activity[k];
		}
	}
}