/*
 * Solve for the node voltages of a linear conductance network.
 *
 * pcode                        - out: wn_conj_gradient_method return code
 * voltage_vect                 - in/out: initial guess, overwritten with solution
 * passed_conductance_graph     - sparse conductance matrix (assumed symmetric)
 * passed_stimulus_vect         - per-node stimulus values
 * passed_stimulus_type_vect    - per-node stimulus kinds (e.g. voltage)
 *
 * Side effects: writes the file-scope globals conductance_graph,
 * stimulus_vect, stimulus_type_vect, len and count, which are read by the
 * file-scope function()/gradient() callbacks during minimization.
 */
void solve_conductance_network
(
  int *pcode,
  double voltage_vect[],
  wn_sparse_matrix passed_conductance_graph,   /* assumed to be symmetric */
  double passed_stimulus_vect[],
  stimulus_type passed_stimulus_type_vect[]
)
{
  double minimum_value;
  int node;

  /* Publish the problem description into the file-scope state consumed by
     the function()/gradient() callbacks below. */
  stimulus_vect = passed_stimulus_vect;
  stimulus_type_vect = passed_stimulus_type_vect;
  conductance_graph = passed_conductance_graph;

  /* Only a square conductance matrix is meaningful here. */
  wn_assert(conductance_graph->len_i == conductance_graph->len_j);
  len = conductance_graph->len_i;

  count = 0;

  /* Minimize the network objective over the node voltages; iteration
     budget is capped at 2*len. */
  wn_conj_gradient_method(pcode, &minimum_value,
                          voltage_vect, len, &function, &gradient, 2*len);

  /* Nodes driven by a voltage stimulus are pinned back to their
     prescribed values, overriding whatever the minimizer produced. */
  for (node = 0; node < len; ++node)
  {
    if (voltage == stimulus_type_vect[node])
    {
      voltage_vect[node] = stimulus_vect[node];
    }
  }
}
// Rescale BLOSUM matrix for query amino acid frequencies with conjugate gradient method. Used in kDP void Matrix::wn_rescale(Sequence *s) throw (std::exception){ init_p_query(s); parameter_wrapper pwrap; pwrap.matrix = original; pwrap.p_query = p_query; int code=0; double val_min; const int dim=20; const int max_iterations=40; double x[dim]; for( size_t a=0; a<20; ++a) x[a] = pow(p_query[a]/p_background[a], 0.7121); wn_conj_gradient_method(&code, &val_min, x, dim, &_func, &_grad, max_iterations, &pwrap); if( code!=WN_SUCCESS && code!=WN_SUBOPTIMAL) throw MyException("wn_conj_gradient_method failed: return-code is '%i'\n", code); for (int a=0; a<20; ++a) for (int b=0; b<20; ++b) matrix[a][b] = 2.0*_log2(x[a]*original[a][b]*x[b]/p_query[a]/p_query[b]); bool bprint=false; double za[20] = {0.0f}; for (int a=0; a<20; ++a){ if( x[a]<0.0 ) bprint=true; for (int b=0; b<20; ++b) za[a] += original[a][b] * x[b]; } double error=0.0; for (int a=0; a<20; ++a){ double tmp = 1.0-(za[a]*x[a])/p_query[a]; error += tmp*tmp; } error_sum += error; if(bprint) throw MyException("One or more negative scaling factor(s) occurred while rescaling the amino acid substitution matrix for '%s'!", s->get_header()); _init_main_diagonal_scores(); /* if(bprint || error >0.01){ if(bprint) ++negatives; printf("Error: %5.5f\n", error); fprintf(stderr, "Minimizing factors (x[])\n"); for( int i=0; i<20; ++i){ fprintf(stderr, "%2.4f ", x[i]); } fprintf(stderr, "\n"); for (int a=0; a<20; ++a){ fprintf(stderr, "%2.4f ", (za[a]*x[a])/p_query[a]); } fprintf(stderr, "\n"); }*/ }
/*
 * Driver: minimize the SIZE*SIZE grid objective with the conjugate
 * gradient method and report the result.
 *
 * Fix: `void main(void)` is not a standard signature — hosted C/C++
 * requires `int main`, so the function now returns int (0 on success).
 *
 * Relies on file-scope state: initialize(), s, count, total_count,
 * solution_vect, function(), gradient(), and the wn_* allocator.
 */
int main(void)
{
  int code, iterations;
  double val_min;

  initialize();

  total_count = 0;

  s = 1.0;
  /*
  for(;;)
  for(s=1.0;;s *= 2)
  */
  {
    /*
    printf("enter s iterations: ");
    scanf("%lf %d",&s,&iterations);
    iterations = WN_IHUGE;
    iterations = SIZE;
    */
    iterations = WN_IHUGE;  /* no iteration cap */

    printf("s = %lf\n", s);

    count = 0;

    wn_conj_gradient_method(&code, &val_min,
                            solution_vect, SIZE*SIZE, (function), (gradient),
                            iterations);

    printf("final result: code = %d ", code);
    printf(" ob = %20.20lf\n", val_min);
    printf("total_count=%d,count=%d.\n", total_count, count);

    /*
    xlate_to_mat(solution_mat,solution_vect);
    wn_print_mat(solution_mat,SIZE,SIZE);
    */
  }

  /* Release everything allocated in the current wn memory group. */
  wn_gpfree();

  return 0;
}
/*
 * Self-test for the conjugate-direction and conjugate-gradient minimizers.
 *
 * argv[1] (optional) overrides the grid dimension lo_dim (1..10000).
 * Environment toggles (read via lo_getenv_bool):
 *   WN_CONJDIR_SELFTEST_TRACES         - print coordinates and return codes
 *   WN_CONJDIR_SELFTEST_SKIP_DIRECTION - skip the conjugate-direction phase
 *   WN_CONJDIR_SELFTEST_DONT_RANDOMIZE - start from zeros instead of random
 *
 * Returns 0; failures abort via wn_assert / lo_check_result.
 */
int wn_selftest_conjdir(int argc, char **argv)
{
  double *coords, *x_coords, *y_coords;
  int code;
  double val_min;
  bool traces = lo_getenv_bool("WN_CONJDIR_SELFTEST_TRACES");
  bool skip_direction = lo_getenv_bool("WN_CONJDIR_SELFTEST_SKIP_DIRECTION");
  bool randomize = !lo_getenv_bool("WN_CONJDIR_SELFTEST_DONT_RANDOMIZE");
  int row, col, k, sts;

  fprintf(stderr, "testing conjdir...\n");

  /* Optional single argument overrides the problem dimension. */
  if (1 < argc)
  {
    wn_assert(2 == argc);
    sts = sscanf(argv[1], "%d", &lo_dim);
    wn_assert(1 == sts);
    wn_assert(1 <= lo_dim && 10000 >= lo_dim);
  }
  else
  {
    lo_dim = 10;
  }
  lo_dim_square = wn_square(lo_dim);

  /* Walls bound the lo_dim x lo_dim point grid on all four sides. */
  lo_left_wall_x = lo_bottom_wall_y = -1;
  lo_right_wall_x = lo_top_wall_y = lo_dim;

  /* One zero-initialized buffer: x coordinates first, then y coordinates. */
  coords = (double *) wn_zalloc(lo_dim_square * 2 * sizeof(double));
  x_coords = coords;
  y_coords = coords + lo_dim_square;

  /* Phase 1: conjugate-direction method (gradient-free). */
  if (!skip_direction)
  {
    if (randomize)
    {
      lo_randomize_coords(coords);
    }
    wn_conj_direction_method(&code, &val_min, coords, NULL, /**/
                             lo_dim_square*2, &lo_conjdir_selftest_vector_cost,
                             WN_IHUGE);
    if (traces)
    {
      lo_display_coords(coords);
      printf("Return code = %d, val_min = %g\n", code, val_min);
    }
    wn_assert(WN_SUCCESS == code);
    lo_check_result(coords);
  }

  /* Phase 2: conjugate-gradient method on a fresh starting point. */
  if (traces)
  {
    printf("\nNow the gradient method\n\n");
  }
  if (randomize)
  {
    lo_randomize_coords(coords);
  }
  else
  {
    /* Deterministic start: every point at the origin. */
    for (row = 0; row < lo_dim; ++row)
    {
      for (col = 0; col < lo_dim; ++col)
      {
        k = row*lo_dim + col;
        x_coords[k] = y_coords[k] = 0.0;
      }
    }
  }

  /* Poison the outputs so we can tell the call actually wrote them. */
  code = 10;
  val_min = -48;
  wn_conj_gradient_method(&code, &val_min, coords, /**/
                          lo_dim_square*2, &lo_conjdir_selftest_vector_cost, /**/
                          &lo_conjdir_selftest_vect_gradient, WN_IHUGE);
  if (traces)
  {
    lo_display_coords(coords);
    printf("Return code = %d, val_min = %g\n", code, val_min);
  }
  wn_assert(WN_SUCCESS == code);
  lo_check_result(coords);

  fprintf(stderr, "  ok!!!!!!\n");

  return 0;
} /* main */
/*
 * Driver: minimize a randomized objective over a (SIZE-1)-dimensional
 * vector with the conjugate gradient method and report the result.
 *
 * Fix: `void main(void)` is not a standard signature — hosted C/C++
 * requires `int main`, so the function now returns int (0 on success).
 *
 * Relies on file-scope state: p, total_count, count, function(),
 * gradient(), SIZE, and the wn_* allocator/vector helpers.
 */
int main(void)
{
  int code, i;
  double val_min, val;
  double *vect;

  /* Memory group with no individual frees; wn_gpfree() releases it all. */
  wn_gpmake("no_free");

  wn_make_vect(&p, SIZE);
  wn_make_vect(&vect, SIZE);

  /* Random positive weights for the objective. */
  for (i = 0; i < SIZE; ++i)
  {
    val = wn_normal_distribution();
    p[i] = wn_abs(val);
    /* p[i] = 1.0; */
  }

  /* Start near the uniform point 1/SIZE, with 1% gaussian jitter.
     Only SIZE-1 entries are free; the last is implied (sums to 1). */
  for (i = 0; i < SIZE-1; ++i)
  {
    vect[i] = 1.0/SIZE*(1.0+0.01*wn_normal_distribution());
    /* vect[i] = 1.0/SIZE; */
  }

  wn_conj_gradient_method(&code, &val_min,
                          vect, SIZE-1, (function), (gradient),
                          WN_IHUGE);
  /*
  wn_conj_direction_method(&code,&val_min,
                           vect,SIZE-1,(function),
                           WN_IHUGE);
  */

  printf("final result: code = %d ", code);
  printf(" ob = %lf\n", val_min);
  printf("total_count=%d,count=%d.\n", total_count, count);

  /*
  wn_print_vect(p,SIZE);
  {
    int i;
    double val;

    val = 1.0;
    for(i=0;i<SIZE-1;++i)
    {
      val -= vect[i];
    }
    vect[SIZE-1] = val;
  }
  wn_print_vect(vect,SIZE);
  {
    int i;

    for(i=0;i<SIZE-1;++i)
    {
      printf("%lf ",p[i]/(vect[i]*vect[i]));
    }
  }
  */

  /* Release everything allocated in the current wn memory group. */
  wn_gpfree();

  return 0;
}