/** Clamp the trial point, push it into the model, and evaluate.
    \param[in] float_indices Indices of optimizable variables.
    \param[in,out] x Current value of optimizable variables; each entry is
                     clamped to lie within max_change_ of the model's
                     current value before evaluation.
    \param[out] dscore First derivatives for the current state.
    \return The model score, or +infinity if the model rejected the state. */
ConjugateGradients::NT ConjugateGradients::get_score(
    Vector<FloatIndex> float_indices, Vector<NT> &x, Vector<NT> &dscore) {
  int opt_var_cnt = float_indices.size();
  /* set model state, limiting each variable's step to at most max_change_ */
  for (int i = 0; i < opt_var_cnt; i++) {
    IMP_CHECK_VALUE(x[i]);
#ifdef IMP_CG_SCALE
    double v = get_scaled_value(float_indices[i]);  // scaled
#else
    double v = get_value(float_indices[i]);         // unscaled
#endif
    if (std::abs(x[i] - v) > max_change_) {
      if (x[i] < v) {
        x[i] = v - max_change_;
      } else {
        x[i] = v + max_change_;
      }
    }
#ifdef IMP_CG_SCALE
    set_scaled_value(float_indices[i], x[i]);
#else
    set_value(float_indices[i], x[i]);
#endif
  }

  NT score;
  /* get score */
  try {
    score = get_scoring_function()->evaluate(true);
  } catch (const ModelException &) {
    // catch by const reference (avoids slicing/copying the exception);
    // if we took a bad step, just return a bad score
    return std::numeric_limits<NT>::infinity();
  }

  /* get derivatives */
  for (int i = 0; i < opt_var_cnt; i++) {
#ifdef IMP_CG_SCALE
    dscore[i] = get_scaled_derivative(float_indices[i]);  // scaled
#else
    dscore[i] = get_derivative(float_indices[i]);         // unscaled
#endif
    IMP_USAGE_CHECK(is_good_value(dscore[i]), "Bad input to CG");
  }
  return score;
}
double GSLOptimizer::evaluate_derivative(const gsl_vector *v, gsl_vector *df) { /* set model state */ write_state(v); /* get score */ double score= get_scoring_function()->evaluate(true); best_score_=std::min(score, best_score_); if (score < stop_score_) { throw AllDone(); } /* get derivatives */ { for (unsigned int i=0; i< fis_.size(); ++i) { double d= get_scaled_derivative(fis_[i]); gsl_vector_set(df, i, d); } } return score; }