/* Sum the positive and negative transverse shim values, either from the
 * current parameter set (fromPars != 0) or from the hardware status. */
void get_transverseShimSums(int fromPars, int *pos, int *neg)
{
    *pos = *neg = 0;
    if (shimset > 0 && shimset <= MAX_SHIMSET) {
        int index;
        double dbltmp;
        int hwShimVal;

        for (index = Z0 + 1; index <= MAX_SHIMS; index++) {
            if (ISACTIVE(index) && ISTRANSVERSE(index)) {
                if (fromPars) {
                    P_getreal(CURRENT, sh_names[index], &dbltmp, 1);
                } else {
                    getExpStatusShim(index, &hwShimVal);
                    dbltmp = (double) hwShimVal;
                }
                if (dbltmp >= 0.0)
                    *pos += dbltmp;   /* accumulated into an int: fractional part is truncated */
                else
                    *neg -= dbltmp;   /* accumulate the magnitude of negative shims */
            }
        }
    }
}
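/*
 * Minimal usage sketch (the caller below is hypothetical): assumes the
 * shim globals (shimset, sh_names, the active-shim tables) have already
 * been initialized by the surrounding code.
 */
static void report_transverse_balance(void)
{
    int pos, neg;

    get_transverseShimSums(1 /* fromPars: read the parameter set */, &pos, &neg);
    printf("transverse shims: positive sum %d, negative sum %d\n", pos, neg);
}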
void printQuads()
{
    int i;

    for (i = 1; i < quadNext; i++) {
        if (ISACTIVE(q[i].num))
            fprintf(iout, "%d: %s, %s, %s, %s\n", q[i].num, otostr(q[i].op),
                    q[i].x->name, q[i].y->name, q[i].z->name);
    }
}
static int isactive(int index)
{
    if (shimset > 0 && shimset <= MAX_SHIMSET)
        return ISACTIVE(index);
    else
        return 131071;  /* 0x1ffff: report active when the shim set is out of range */
}
static void channel_update_listed(channel_t *c)
{
    DEBUG(" %s %x/%x/%x (%d) %s", c->which, c->status, c->flags, c->events,
          count_epoll, __FUNCTION__);
    ASSERT(ISLISTED(c));

    uint32_t evt = channel_events(c);

    if (ISMONITORED(c)) {
        /* Channel is in the poll set: re-register it with the fresh mask. */
        channel_rearm(c, evt);
    } else if (ISACTIVE(c)) {
        /* Not monitored: just record the desired event mask. */
        c->events = evt;
    }
}
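/*
 * ISLISTED/ISMONITORED/ISACTIVE test channel state bits.  A sketch of the
 * flag-test shape they imply -- the bit names and values here are
 * assumptions; the real definitions live in the channel header:
 *
 *   #define ISLISTED(c)     ((c)->flags & CHANNEL_LISTED)
 *   #define ISMONITORED(c)  ((c)->flags & CHANNEL_MONITORED)
 *   #define ISACTIVE(c)     ((c)->flags & CHANNEL_ACTIVE)
 */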
static double perceptron_mean_square_error(double *actual, size_t code, int n)
{
    int i;
    double dif, sum = 0.0;

    if (n == 0) {
        printerr("perceptron_mean_square_error: Length 0\n");
        return 0;
    }
    for (i = 0; i < n; ++i) {
        dif = actual[i] - ISACTIVE(i, code);
        sum += dif * dif;
    }
    return 0.5 * (sum / n);
}
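/*
 * ISACTIVE(i, code) supplies the target output for neuron i.  Since the
 * network uses a bipolar sigmoid, a plausible definition is a +/-1
 * one-hot encoding -- an assumption for illustration, not necessarily the
 * project's actual macro:
 *
 *   #define ISACTIVE(i, code)  ((size_t)(i) == (code) ? 1.0 : -1.0)
 *
 * Minimal usage sketch under that assumption: three outputs scored
 * against the target vector (-1, +1, -1) for code == 1.
 */
static void example_mse(void)
{
    double out[3] = { -0.9, 0.8, -0.7 };  /* network outputs */

    printf("mse = %f\n", perceptron_mean_square_error(out, 1, 3));
}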
/**
 * Computes backpropagation for a perceptron and a given pattern.
 * Raw version which performs the calculations and applies the weight
 * updates in place.
 *
 * @param per   Initialized perceptron
 * @param pat   Initialized pattern
 * @param code  Active neuron in output pattern
 * @param lrate Learning rate
 * @return 0 if unsuccessful, 1 otherwise
 */
int perceptron_backpropagation_raw(perceptron per, pattern pat, size_t code, double lrate)
{
    int n, i, j, k, err = 1;
    double Dj_in, Dj, sum;

    /* Rename temp delta vectors */
    double **d = per->d,       /* Deltas */
           **rin = per->rw,    /* Raw neuron inputs */
           ***dw = per->dw;    /* Weight deltas */

    /* Set input layer values.
     * We just make net[0] point to the pattern so we don't have to copy
     * all of it each time. */
    per->net[0] = pat;

    /* Compute feed forward through the input and hidden layers */
    for (i = 0; i < 2; ++i)
        /* For all neurons in the next layer (no bias) */
        for (k = 0; k < per->n[i+1]; ++k) {
            /* Weighted sum over all source neurons j (bias included);
             * this inlines perceptron_weighted_sum(per->net[i], per->w[i],
             * k, per->n[i] + 1), so no extra loop over j is needed. */
            sum = 0;
            n = per->n[i] + 1;
            while (n--)
                sum += per->net[i][n] * per->w[i][n][k];
            rin[i][k] = sum;  /* Save raw input to be used later */
            per->net[i+1][k] = perceptron_bipolarsigmoid(sum);
        }

    /* Calculate output layer (i = 2) backpropagation */
    for (k = 0; k < per->n[2]; ++k) {
        /* Yk_in was already computed above: rin[1][k] */
        /* Calculate dk against the desired output (neuron k should match
         * the code) */
        d[1][k] = (ISACTIVE(k, code) - per->net[2][k])
                * perceptron_bipolarsigmoid_prima(rin[1][k]);
        /* Calculate weight deltas for all weights into this neuron from
         * the previous layer */
        for (j = 0; j < per->n[1] + 1; ++j)
            dw[1][j][k] = lrate * d[1][k] * per->net[1][j];
    }

    /* Calculate hidden layer (i = 1) backpropagation */
    for (j = 0; j < per->n[1]; ++j) {
        /* Zj_in was already computed above: rin[0][j] */
        /* Calculate Dj_in from the output layer deltas and the hidden
         * layer weights */
        Dj_in = 0;
        for (k = 0; k < per->n[2]; ++k)
            Dj_in += d[1][k] * per->w[1][j][k];
        /* Calculate delta */
        Dj = Dj_in * perceptron_bipolarsigmoid_prima(rin[0][j]);
        /* Calculate weight deltas for all weights into this neuron from
         * the previous layer */
        for (i = 0; i < per->n[0] + 1; ++i)
            dw[0][i][j] = lrate * Dj * per->net[0][i];
    }

    /* Update weights for the two weighted layers */
    for (i = 0; i < 2; ++i)
        /* For each neuron (+ bias) */
        for (j = 0; j < per->n[i] + 1; ++j)
            /* To all neurons in the next layer */
            for (k = 0; k < per->n[i + 1]; ++k)
                per->w[i][j][k] += dw[i][j][k];

    return err;
}
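/*
 * Hedged usage sketch: one online-training pass over a data set.  The
 * driver below is hypothetical -- npat, patterns[] and codes[] stand in
 * for whatever the rest of the library provides; only the call to
 * perceptron_backpropagation_raw() itself comes from the code above.
 */
static void train_epoch(perceptron per, pattern *patterns, size_t *codes,
                        int npat, double lrate)
{
    int p;

    for (p = 0; p < npat; ++p) {
        /* Each call feeds one pattern forward, backpropagates the error
         * for the coded output neuron, and applies the weight deltas. */
        if (!perceptron_backpropagation_raw(per, patterns[p], codes[p], lrate))
            break;  /* 0 signals an unsuccessful update */
    }
}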