Example #1
void Optimizer::augment_sparse_linear_system(SparseSystem& W,
    const Properties& prop) {
  if (prop.method == DOG_LEG) {
    // We're using the incremental version of Powell's Dog-Leg, so we need
    // to form the updated gradient.
    const VectorXd& f_new = W.rhs();

    // Add this batch's contribution to the running sum of squared errors at
    // the current linearization point.
    current_SSE_at_linpoint += f_new.squaredNorm();

    // Allocate the new gradient vector
    VectorXd g_new(W.num_cols());

    // Compute the increment W^T * f_new
    VectorXd increment = mul_SparseMatrixTrans_Vector(W, f_new);

    // Set g_new = (g_old 0)^T + W^T f_new.
    g_new.head(gradient.size()) = gradient + increment.head(gradient.size());
    g_new.tail(W.num_cols() - gradient.size()) = increment.tail(
        W.num_cols() - gradient.size());

    // Cache the new gradient vector
    gradient = g_new;
  }

  // Apply Givens rotations to incrementally QR-factorize the newly augmented sparse system.
  for (int i = 0; i < W.num_rows(); i++) {
    SparseVector new_row = W.get_row(i);
    function_system._R.add_row_givens(new_row, W.rhs()(i));
  }
}
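
For reference, the gradient-augmentation step above reduces to g_new = [g_old; 0] + W^T * f_new, where the old gradient is padded with zeros for the newly introduced variables. The following minimal sketch reproduces that update with dense Eigen types; the dimensions, variable names, and use of MatrixXd/VectorXd in place of iSAM's SparseSystem and mul_SparseMatrixTrans_Vector are assumptions made purely for illustration.

#include <Eigen/Dense>
#include <iostream>

int main() {
  using Eigen::MatrixXd;
  using Eigen::VectorXd;

  // Hypothetical sizes: the old gradient has 3 entries, and the augmented
  // system W adds 4 new measurement rows and 2 new variable columns
  // (5 columns in total).
  VectorXd g_old = VectorXd::Ones(3);
  MatrixXd W = MatrixXd::Random(4, 5);
  VectorXd f_new = VectorXd::Random(4);

  // increment = W^T * f_new (what mul_SparseMatrixTrans_Vector computes
  // on the sparse system in the original code).
  VectorXd increment = W.transpose() * f_new;

  // g_new = [g_old; 0] + increment: the old entries get the old gradient
  // plus the new contribution, the new entries get only the contribution
  // of the new rows.
  VectorXd g_new(W.cols());
  g_new.head(g_old.size()) = g_old + increment.head(g_old.size());
  g_new.tail(W.cols() - g_old.size()) = increment.tail(W.cols() - g_old.size());

  std::cout << "g_new =\n" << g_new << std::endl;
  return 0;
}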