Example #1
void Forsythe( AbstractDistMatrix<T>& J, Int n, T alpha, T lambda )
{
    DEBUG_ONLY(CSE cse("Forsythe"))
    // A Forsythe matrix is an n x n Jordan block with eigenvalue lambda,
    // perturbed by setting the bottom-left entry to alpha.
    Jordan( J, n, lambda );
    if( n > 0 )
        J.Set( n-1, 0, alpha );
}
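For context, `Jordan( J, n, lambda )` fills J with an n x n Jordan block (lambda on the diagonal, ones on the superdiagonal), and the following `Set` call places alpha in the bottom-left corner. A minimal driver sketch, assuming Elemental's public `El::Forsythe`, `El::DistMatrix`, and `El::Print` (the driver itself is an illustration, not part of the library source):

// Hypothetical driver (illustration only): print a 4x4 Forsythe matrix.
#include <El.hpp>

int main( int argc, char* argv[] )
{
    El::Environment env( argc, argv );
    El::DistMatrix<double> J;
    El::Forsythe( J, 4, 1e-8, 0. );   // n = 4, alpha = 1e-8, lambda = 0
    El::Print( J, "Forsythe matrix" );
    return 0;
}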
Example #2
File: LMfit.cpp  Project: eugeneai/fityk
bool LMfit::do_iteration()
// pre: init() called
{
    if (na_ < 1)
        throw ExecuteError("No parameters to fit.");
    iter_nr_++;
    alpha_ = alpha;
    for (int j = 0; j < na_; j++)
        alpha_[na_ * j + j] *= (1.0 + lambda);
    beta_ = beta;
    if (F_->get_verbosity() > 1) { // level: debug
        F_->ui()->mesg(print_matrix (beta_, 1, na_, "beta"));
        F_->ui()->mesg(print_matrix (alpha_, na_, na_, "alpha'"));
    }

    // Matrix solution (Ax=b)  alpha_ * da == beta_
    Jordan (alpha_, beta_, na_);

    // da is in beta_
    if (F_->get_verbosity() >= 1) { // level: verbose
        vector<realt> rel(na_);
        for (int q = 0; q < na_; q++)
            rel[q] = beta_[q] / a[q] * 100;
        F_->ui()->mesg(print_matrix (rel, 1, na_, "delta(A)/A[%]"));
    }

    for (int i = 0; i < na_; i++)
        beta_[i] = a[i] + beta_[i];   // beta_ now holds the trial parameters a + da

    if (F_->get_verbosity() >= 1)
        output_tried_parameters(beta_);

    // compute chi2_
    chi2_ = compute_wssr(beta_, dmdm_);
    if (chi2_ < chi2) { // better fitting
        chi2 = chi2_;
        a = beta_;
        compute_derivatives(a, dmdm_, alpha, beta);
        lambda /= F_->get_settings()->lm_lambda_down_factor;
        return true;
    }
    else { // worse fitting
        lambda *= F_->get_settings()->lm_lambda_up_factor;
        return false;
    }
}
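The `Jordan (alpha_, beta_, na_)` call above solves the na_ x na_ linear system alpha_ * da == beta_ in place, leaving the step da in beta_. A minimal, self-contained Gauss-Jordan elimination sketch of that step (an illustration with an assumed flat row-major layout and a hypothetical name `jordan_solve`, not fityk's actual Jordan routine):

#include <cmath>
#include <stdexcept>
#include <utility>
#include <vector>

// Solve A*x = b by Gauss-Jordan elimination with partial pivoting.
// A is n x n, stored row-major in a flat vector; the solution overwrites b.
void jordan_solve(std::vector<double>& A, std::vector<double>& b, int n)
{
    for (int i = 0; i < n; ++i) {
        // choose the largest pivot in column i (rows i..n-1)
        int piv = i;
        for (int r = i + 1; r < n; ++r)
            if (std::fabs(A[r*n + i]) > std::fabs(A[piv*n + i]))
                piv = r;
        if (A[piv*n + i] == 0.0)
            throw std::runtime_error("singular matrix");
        if (piv != i) {
            for (int c = 0; c < n; ++c)
                std::swap(A[i*n + c], A[piv*n + c]);
            std::swap(b[i], b[piv]);
        }
        // normalize the pivot row, then eliminate column i from every other row
        const double inv = 1.0 / A[i*n + i];
        for (int c = 0; c < n; ++c)
            A[i*n + c] *= inv;
        b[i] *= inv;
        for (int r = 0; r < n; ++r) {
            if (r == i)
                continue;
            const double f = A[r*n + i];
            for (int c = 0; c < n; ++c)
                A[r*n + c] -= f * A[i*n + c];
            b[r] -= f * b[i];
        }
    }
}

In the listing above, the solved step plays the same role: after the solve, beta_ holds da, which is added to a[] to form the trial parameters and evaluated via compute_wssr before lambda is adjusted up or down.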