static int get_range(char *args, range_t *range, char **nextarg)
{
    if (isdigit(*args)) {
        /*
         * This local intentionally shadows the nextarg parameter; it is
         * only the strtok_r() save pointer for this block.
         */
        char *nextarg;

        args = strtok_r(args, whitespace, &nextarg);
        range->offset = get_scaled_value(args, "offset");
        if (range->offset == BOGUS_SIZE)
            return CMD_ERROR;
        args = nextarg + strspn(nextarg, whitespace);

        /*
         * <length> ... only if offset specified
         */
        if (*args != '\0') {
            args = strtok_r(args, whitespace, &nextarg);
            if (*args != '*') {
                range->length = get_scaled_value(args, "length");
                if (range->length == BOGUS_SIZE)
                    return CMD_ERROR;
            } else
                range->length = 0;	/* map to end of file */
            args = nextarg + strspn(nextarg, whitespace);
        }
    }

    *nextarg = args;
    return CMD_SUCCESS;
}
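/*
 * The strtok_r()+strspn() advance pattern above recurs throughout these
 * parsers: carve off one token, then skip past trailing whitespace so args
 * lands on the next token (or '\0'). A minimal self-contained sketch of
 * just that pattern -- the whitespace set and the sample input here are
 * stand-ins, not values taken from the surrounding source.
 */
#include <stdio.h>
#include <string.h>

int main(void)
{
    static const char whitespace[] = " \t\n";
    char buf[] = "4k 2m *";
    char *args = buf, *nextarg, *tok;

    while (*args != '\0') {
        tok = strtok_r(args, whitespace, &nextarg);
        if (tok == NULL)
            break;
        printf("token: '%s'\n", tok);
        args = nextarg + strspn(nextarg, whitespace);
    }
    return 0;
}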
gsl_vector *GSLOptimizer::get_state() const {
  gsl_vector *r = gsl_vector_alloc(get_dimension());
  for (unsigned int i = 0; i < fis_.size(); ++i) {
    gsl_vector_set(r, i, get_scaled_value(fis_[i]));
  }
  return r;
}
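/*
 * get_state() transfers ownership: the caller must release the returned
 * vector with gsl_vector_free(). A standalone sketch of the GSL calls
 * involved, with a made-up dimension and values in place of the IMP types:
 */
#include <stdio.h>
#include <gsl/gsl_vector.h>

int main(void)
{
    gsl_vector *v = gsl_vector_alloc(3);    /* like get_state()'s allocation */
    size_t i;

    for (i = 0; i < v->size; i++)
        gsl_vector_set(v, i, 0.5 * i);      /* stand-in for get_scaled_value() */
    for (i = 0; i < v->size; i++)
        printf("v[%zu] = %g\n", i, gsl_vector_get(v, i));
    gsl_vector_free(v);                     /* caller's responsibility */
    return 0;
}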
/*
 * shmem_seg - create [shmget] and register a SysV shared memory segment
 * of specified size
 */
static int shmem_seg(char *args)
{
    glctx_t *gcp = &glctx;
    char *segname, *nextarg;
    range_t range = { 0L, 0L };

    args += strspn(args, whitespace);
    if (!required_arg(args, "<seg-name>"))
        return CMD_ERROR;
    segname = strtok_r(args, whitespace, &nextarg);
    args = nextarg + strspn(nextarg, whitespace);

    if (!required_arg(args, "<size>"))
        return CMD_ERROR;
    args = strtok_r(args, whitespace, &nextarg);
    range.length = get_scaled_value(args, "size");
    if (range.length == BOGUS_SIZE)
        return CMD_ERROR;
    args = nextarg + strspn(nextarg, whitespace);

    if (!segment_register(SEGT_SHM, segname, &range, MAP_SHARED))
        return CMD_ERROR;

    return CMD_SUCCESS;
}
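/*
 * get_scaled_value() itself is not part of this excerpt. A plausible
 * reconstruction -- an assumption for illustration, not the actual memtoy
 * code, and with a simplified signature -- based on the "<size>[kmgp]"
 * grammar below: k/m/g scale by powers of two, 'p' scales by the system
 * page size, and BOGUS_SIZE (assumed here to be (size_t)-1) flags any
 * parse error.
 */
#include <stdlib.h>
#include <unistd.h>

#define BOGUS_SIZE ((size_t)-1)     /* assumed sentinel */

static size_t get_scaled_value_sketch(const char *arg)
{
    char *end;
    unsigned long long val = strtoull(arg, &end, 0);

    if (end == arg)
        return BOGUS_SIZE;          /* no digits at all */
    switch (*end) {
    case '\0':
        break;
    case 'k': case 'K': val <<= 10; end++; break;
    case 'm': case 'M': val <<= 20; end++; break;
    case 'g': case 'G': val <<= 30; end++; break;
    case 'p': case 'P':
        val *= (unsigned long long)sysconf(_SC_PAGESIZE);
        end++;
        break;
    default:
        return BOGUS_SIZE;          /* unknown suffix */
    }
    return (*end == '\0') ? (size_t)val : BOGUS_SIZE;
}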
/*
 * anon_seg: <seg-name> <size>[kmgp] [private|shared]
 */
static int anon_seg(char *args)
{
    glctx_t *gcp = &glctx;
    char *segname, *nextarg;
    range_t range = { 0L, 0L };
    int segflag = 0;

    args += strspn(args, whitespace);
    if (!required_arg(args, "<seg-name>"))
        return CMD_ERROR;
    segname = strtok_r(args, whitespace, &nextarg);
    args = nextarg + strspn(nextarg, whitespace);

    if (!required_arg(args, "<size>"))
        return CMD_ERROR;
    args = strtok_r(args, whitespace, &nextarg);
    range.length = get_scaled_value(args, "size");
    if (range.length == BOGUS_SIZE)
        return CMD_ERROR;
    args = nextarg + strspn(nextarg, whitespace);

    if (*args != '\0') {
        segflag = get_shared(args);
        if (segflag == -1)
            return CMD_ERROR;
    }

    if (!segment_register(SEGT_ANON, segname, &range, segflag))
        return CMD_ERROR;

    return CMD_SUCCESS;
}
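/*
 * get_shared() is likewise not shown. A minimal sketch of what the
 * "[private|shared]" grammar implies -- hypothetical, not the actual
 * memtoy helper: map the keyword to the corresponding mmap() flag and
 * return -1 on anything else.
 */
#include <string.h>
#include <sys/mman.h>

static int get_shared_sketch(const char *arg)
{
    if (strcmp(arg, "shared") == 0)
        return MAP_SHARED;
    if (strcmp(arg, "private") == 0)
        return MAP_PRIVATE;
    return -1;      /* unrecognized keyword */
}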
/*
 * Convert a single threshold string or paren groups of thresh's as
 * described below.  All thresh's are saved to an allocated list at
 * *vlistp; the caller will need to free that space.  On return:
 * *vcntp is the count of the vlist array, and vlist is either
 * a single thresh or N groups of thresh's with a trailing zero:
 * (cnt_1 thr_1a thr_1b [...]) ... (cnt_N thr_Na thr_Nb [...]) 0.
 * Returns 0 when all conversions were OK, and 1 for any syntax,
 * conversion, or alloc error.
 */
static int get_thresh(int **vlistp, int *vcntp)
{
    int argn, value, gci = 0, grp_cnt = 0, paren = 0, nerr = 0;
    char *rp, *src;

    for (argn = 2; (src = LINEARG(argn)) != NULL; argn++) {
        if (*src == LPAREN) {
            gci = *vcntp;   /* remember slot for this group's count */
            if ((nerr = vlist_append(vlistp, vcntp, 0)) != 0)
                break;
            paren = 1;
            src++;
        }
        if (*(rp = LASTBYTE(src)) == RPAREN) {
            if (paren) {
                grp_cnt = *vcntp - gci;
                *(*vlistp + gci) = grp_cnt;     /* back-fill group count */
                paren = 0;
                *rp = '\0';
            } else {
                nerr = 1;   /* ')' without matching '(' */
                break;
            }
        }
        value = get_scaled_value(src, &nerr);
        if (nerr || (nerr = vlist_append(vlistp, vcntp, value)))
            break;
    }
    if (nerr == 0 && grp_cnt)
        nerr = vlist_append(vlistp, vcntp, 0);  /* trailing zero terminator */
    return (nerr);
}
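/*
 * vlist_append() is not shown here. One plausible shape, assumed for
 * illustration only: grow the int array at *vlistp by one slot via
 * realloc() and append the value, returning 1 on allocation failure (the
 * "alloc error" case mentioned above) and 0 on success.
 */
#include <stdlib.h>

static int vlist_append_sketch(int **vlistp, int *vcntp, int value)
{
    int *nl = realloc(*vlistp, (*vcntp + 1) * sizeof (int));

    if (nl == NULL)
        return (1);     /* alloc error; original list left intact */
    nl[*vcntp] = value;
    *vlistp = nl;
    (*vcntp)++;
    return (0);
}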
/*
 * Common function to set a system or cpu threshold.
 */
static int cmnthr(int req)
{
    int value, nerr = 0, upval = OKUP;
    char *thresh = LINEARG(1);

    if (strcmp(thresh, always_on) == 0)
        value = INT_MAX;
    else if ((value = get_scaled_value(thresh, &nerr)) < 0 || nerr) {
        mesg(MERR, "%s must be a positive value\n", LINEARG(0));
        upval = NOUP;
    }
    if (upval == OKUP)
        (void) ioctl(pm_fd, req, value);
    return (upval);
}
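/*
 * Note that this get_scaled_value() variant reports errors through an int
 * pointer rather than a sentinel value. A hedged sketch of such a parser,
 * assuming h/m/s time suffixes scaled to seconds -- an illustration of the
 * error-flag convention, not the actual pmconfig implementation:
 */
#include <stdlib.h>

static int get_scaled_value_sketch2(const char *s, int *nerrp)
{
    char *end;
    long v = strtol(s, &end, 10);
    long mult = 1;

    if (end == s || v < 0) {
        *nerrp = 1;     /* no digits, or negative */
        return (-1);
    }
    switch (*end) {
    case '\0':              break;
    case 'h': mult = 3600;  end++; break;   /* hours -> seconds */
    case 'm': mult = 60;    end++; break;   /* minutes -> seconds */
    case 's': mult = 1;     end++; break;   /* already seconds */
    default:  *nerrp = 1;   return (-1);
    }
    if (*end != '\0') {
        *nerrp = 1;     /* trailing junk after suffix */
        return (-1);
    }
    return ((int)(v * mult));
}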
/** \param[in] float_indices Indices of optimizable variables.
    \param[in] x Current value of optimizable variables.
    \param[out] dscore First derivatives for current state.
    \return The model score.
 */
ConjugateGradients::NT ConjugateGradients::get_score(
    Vector<FloatIndex> float_indices, Vector<NT> &x, Vector<NT> &dscore) {
  int i, opt_var_cnt = float_indices.size();

  /* set model state */
  for (i = 0; i < opt_var_cnt; i++) {
    IMP_CHECK_VALUE(x[i]);
#ifdef IMP_CG_SCALE
    double v = get_scaled_value(float_indices[i]);  // scaled
#else
    double v = get_value(float_indices[i]);  // not scaled
#endif
    if (std::abs(x[i] - v) > max_change_) {
      if (x[i] < v) {
        x[i] = v - max_change_;
      } else {
        x[i] = v + max_change_;
      }
    }
#ifdef IMP_CG_SCALE
    set_scaled_value(float_indices[i], x[i]);
#else
    set_value(float_indices[i], x[i]);
#endif
  }

  NT score;
  /* get score */
  try {
    score = get_scoring_function()->evaluate(true);
  } catch (const ModelException &) {
    // if we took a bad step, just return a bad score
    return std::numeric_limits<NT>::infinity();
  }

  /* get derivatives */
  for (i = 0; i < opt_var_cnt; i++) {
#ifdef IMP_CG_SCALE
    dscore[i] = get_scaled_derivative(float_indices[i]);  // scaled
#else
    dscore[i] = get_derivative(float_indices[i]);  // not scaled
#endif
    IMP_USAGE_CHECK(is_good_value(dscore[i]), "Bad input to CG");
  }
  return score;
}
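/*
 * The first loop above limits how far any one variable may move per call:
 * x[i] is pulled back to within max_change_ of the model's current value v.
 * The same clamp in isolation (plain C; the names are ours, not IMP's):
 */
#include <math.h>

static double clamp_step(double x, double v, double max_change)
{
    if (fabs(x - v) > max_change)
        return (x < v) ? v - max_change : v + max_change;
    return x;
}
/*
 * e.g. clamp_step(10.0, 2.0, 3.0) == 5.0: a proposed move of 8 units away
 * from v is cut back to the allowed 3.
 */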
void GSLOptimizer::update_state(gsl_vector *x) const {
  for (unsigned int i = 0; i < fis_.size(); ++i) {
    gsl_vector_set(x, i, get_scaled_value(fis_[i]));
  }
}
Float ConjugateGradients::do_optimize(unsigned int max_steps) {
  IMP_OBJECT_LOG;
  IMP_USAGE_CHECK(get_model(),
                  "Must set the model on the optimizer before optimizing");
  clear_range_cache();
  Vector<NT> x, dx;
  int i;
  // ModelData* model_data = get_model()->get_model_data();

  FloatIndexes float_indices = get_optimized_attributes();

  int n = float_indices.size();
  if (n == 0) {
    IMP_THROW("There are no optimizable degrees of freedom.", ModelException);
  }

  x.resize(n);
  dx.resize(n);
  // get initial state in x(n):
  for (i = 0; i < n; i++) {
#ifdef IMP_CG_SCALE
    x[i] = get_scaled_value(float_indices[i]);  // scaled
#else
    x[i] = get_value(float_indices[i]);  // not scaled
#endif
    IMP_USAGE_CHECK(
        !IMP::isnan(x[i]) && std::abs(x[i]) < std::numeric_limits<NT>::max(),
        "Bad input to CG");
  }

  // Initialize optimization variables
  int ifun = 0;
  int nrst;
  NT dg1, xsq, dxsq, alpha, step, u1, u2, u3, u4;
  NT f = 0., dg = 1., w1 = 0., w2 = 0., rtst, bestf;
  bool gradient_direction;

  // dx holds the gradient at x
  // search holds the search vector
  // estimate holds the best current estimate to the minimizer
  // destimate holds the gradient at the best current estimate
  // resy holds the restart Y vector
  // ressearch holds the restart search vector
  Vector<NT> search, estimate, destimate, resy, ressearch;
  search.resize(n);
  estimate.resize(n);
  destimate.resize(n);
  resy.resize(n);
  ressearch.resize(n);

  /* Calculate the function and gradient at the initial point and
     initialize nrst, which is used to determine whether a Beale restart is
     being done. nrst=n means that this iteration is a restart iteration. */
g20:
  f = get_score(float_indices, x, dx);
  if (get_stop_on_good_score() &&
      get_scoring_function()->get_had_good_score()) {
    estimate = x;
    goto end;
  }
  ifun++;
  nrst = n;
  // this is a gradient, not restart, direction:
  gradient_direction = true;

  /* Calculate the initial search direction, the norm of x squared, and
     the norm of dx squared. dg1 is the current directional derivative,
     while xsq and dxsq are the squared norms. */
  dg1 = xsq = 0.;
  for (i = 0; i < n; i++) {
    search[i] = -dx[i];
    xsq += x[i] * x[i];
    dg1 -= dx[i] * dx[i];
  }
  dxsq = -dg1;

  /* Test if the initial point is the minimizer. */
  if (dxsq <= cg_eps * cg_eps * std::max(NT(1.0), xsq)) {
    goto end;
  }

/* Begin the major iteration loop. */
g40:
  update_states();
  /* Begin linear search. alpha is the steplength. */

  if (gradient_direction) {
    /* This results in scaling the initial search vector to unity. */
    alpha = 1.0 / sqrt(dxsq);
  } else if (nrst == 1) {
    /* Set alpha to 1.0 after a restart. */
    alpha = 1.0;
  } else {
    /* Set alpha to the nonrestart conjugate gradient alpha. */
    alpha = alpha * dg / dg1;
  }

  /* Store current best estimate for the score */
  estimate = x;
  destimate = dx;

  /* Try to find a better score by linear search */
  if (!line_search(x, dx, alpha, float_indices, ifun, f, dg, dg1, max_steps,
                   search, estimate)) {
    /* If the line search failed, it was either because the maximum number
       of iterations was exceeded, or the minimum could not be found */
    if (static_cast<unsigned int>(ifun) > max_steps) {
      goto end;
    } else if (gradient_direction) {
      goto end;
    } else {
      goto g20;
    }
  }

  /* THE LINE SEARCH HAS CONVERGED. TEST FOR CONVERGENCE OF THE ALGORITHM. */
  dxsq = xsq = 0.0;
  for (i = 0; i < n; i++) {
    dxsq += dx[i] * dx[i];
    xsq += x[i] * x[i];
  }
  if (dxsq < threshold_) {
    goto end;
  }

  /* Search continues. Set search(i)=alpha*search(i), the full step vector. */
  for (i = 0; i < n; i++) {
    search[i] *= alpha;
  }

  /* COMPUTE THE NEW SEARCH VECTOR;
     TEST IF A POWELL RESTART IS INDICATED. */
  rtst = 0.;
  for (i = 0; i < n; ++i) {
    rtst += dx[i] * destimate[i];
  }
  if (std::abs(rtst / dxsq) > 0.2) {
    nrst = n;
  }

  /* If a restart is indicated, save the current d and y as the Beale
     restart vectors and save d'y and y'y in w1 and w2. */
  if (nrst == n) {
    ressearch = search;
    w1 = w2 = 0.;
    for (i = 0; i < n; i++) {
      resy[i] = dx[i] - destimate[i];
      w1 += resy[i] * resy[i];
      w2 += search[i] * resy[i];
    }
  }

  /* CALCULATE THE RESTART HESSIAN TIMES THE CURRENT GRADIENT. */
  u1 = u2 = 0.0;
  for (i = 0; i < n; i++) {
    u1 -= ressearch[i] * dx[i] / w1;
    u2 += ressearch[i] * dx[i] * 2.0 / w2 - resy[i] * dx[i] / w1;
  }
  u3 = w2 / w1;
  for (i = 0; i < n; i++) {
    estimate[i] = -u3 * dx[i] - u1 * resy[i] - u2 * ressearch[i];
  }

  /* If this is a restart iteration, estimate contains the new search
     vector. */
  if (nrst != n) {
    /* NOT A RESTART ITERATION. CALCULATE THE RESTART HESSIAN TIMES THE
       CURRENT Y. */
    u1 = u2 = u3 = 0.0;
    for (i = 0; i < n; i++) {
      u1 -= (dx[i] - destimate[i]) * ressearch[i] / w1;
      u2 = u2 - (dx[i] - destimate[i]) * resy[i] / w1 +
           2.0 * ressearch[i] * (dx[i] - destimate[i]) / w2;
      u3 += search[i] * (dx[i] - destimate[i]);
    }
    step = u4 = 0.;
    for (i = 0; i < n; i++) {
      step = (w2 / w1) * (dx[i] - destimate[i]) + u1 * resy[i] +
             u2 * ressearch[i];
      u4 += step * (dx[i] - destimate[i]);
      destimate[i] = step;
    }

    /* CALCULATE THE DOUBLY UPDATED HESSIAN TIMES THE CURRENT GRADIENT TO
       OBTAIN THE SEARCH VECTOR. */
    u1 = u2 = 0.0;
    for (i = 0; i < n; i++) {
      u1 -= search[i] * dx[i] / u3;
      u2 += (1.0 + u4 / u3) * search[i] * dx[i] / u3 -
            destimate[i] * dx[i] / u3;
    }
    for (i = 0; i < n; i++) {
      estimate[i] = estimate[i] - u1 * destimate[i] - u2 * search[i];
    }
  }

  /* CALCULATE THE DERIVATIVE ALONG THE NEW SEARCH VECTOR. */
  search = estimate;
  dg1 = 0.0;
  for (i = 0; i < n; i++) {
    dg1 += search[i] * dx[i];
  }

  /* IF THE NEW DIRECTION IS NOT A DESCENT DIRECTION, STOP. */
  if (dg1 <= 0.0) {
    /* UPDATE NRST TO ASSURE AT LEAST ONE RESTART EVERY N ITERATIONS. */
    if (nrst == n) {
      nrst = 0;
    }
    nrst++;
    gradient_direction = false;
    goto g40;
  }

/* ROUNDOFF HAS PRODUCED A BAD DIRECTION. */
end:
  // If the 'best current estimate' is better than the current state, return
  // that:
  bestf = get_score(float_indices, estimate, destimate);
  if (bestf < f) {
    f = bestf;
  } else {
    // Otherwise, restore the current state x (note that we already have the
    // state x and its derivatives dx, so it's rather inefficient to
    // recalculate the score here, but it's cleaner)
    f = get_score(float_indices, x, dx);
  }
  update_states();
  return f;
}
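/*
 * The Powell restart test at the top of this block triggers a Beale
 * restart when the current and previous gradients are far from orthogonal:
 * |g_new . g_old| > 0.2 * |g_new|^2. The same test in isolation (plain C;
 * the function name is ours, for illustration):
 */
#include <math.h>
#include <stddef.h>

static int powell_restart_needed(const double *dx, const double *destimate,
                                 size_t n)
{
    double rtst = 0.0, dxsq = 0.0;
    size_t i;

    for (i = 0; i < n; i++) {
        rtst += dx[i] * destimate[i];   /* g_new . g_old */
        dxsq += dx[i] * dx[i];          /* |g_new|^2 */
    }
    return fabs(rtst / dxsq) > 0.2;
}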