Points createProblem(int size) {
    // Seed the C library PRNG once, on the first call only.
    static bool flag = true;
    if (flag) {
        std::srand(static_cast<unsigned>(std::time(nullptr)));
        flag = false;
    }

    Points r;
    for (int i = 0; i < size; i++) {
        Point p = {i, rand_(), rand_()};  // point id plus two random coordinates
        r.push_back(p);
    }
    return r;
}
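The Point and Points types and the rand_() helper are not shown in the snippet; a minimal sketch of definitions they could correspond to (the field layout and the coordinate range are assumptions, not taken from the original code):

#include <cstdlib>
#include <vector>

// Hypothetical supporting definitions for createProblem(); names and ranges are guesses.
struct Point { int id; double x; double y; };
typedef std::vector<Point> Points;

// Assumed helper: a random coordinate in [0, 100), built on the std::rand() seeded above.
static double rand_() {
    return 100.0 * std::rand() / (RAND_MAX + 1.0);
}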
Eigen::VectorXd GaussianCovProposal::propose(uint id, const Eigen::VectorXd &sample,
                                             double sigma) {
  uint n = sample.size();

  // Vector of independent draws from the proposal's random number source.
  Eigen::VectorXd randn(n);
  for (uint i = 0; i < n; i++)
    randn(i) = rand_(gen_);

  // Correlated Gaussian step: sigL_[id] couples the components, sigma scales the step.
  return sample + sigL_[id] * randn * sigma * sigma;
}
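gen_, rand_, and sigL_ are class members that the snippet does not show; a minimal sketch of what they are assumed to be, with sigL_[id] holding the lower Cholesky factor of the proposal covariance (computed here with Eigen's LLT):

#include <Eigen/Dense>
#include <random>
#include <vector>

// Assumed member state behind GaussianCovProposal::propose(); names mirror the snippet.
struct GaussianCovProposalState {
  std::mt19937 gen_{std::random_device{}()};        // random engine
  std::normal_distribution<double> rand_{0.0, 1.0}; // independent N(0, 1) draws
  std::vector<Eigen::MatrixXd> sigL_;               // one Cholesky factor per chain id

  // Example setup: store the lower-triangular factor L with L * L.transpose() == cov.
  void addCovariance(const Eigen::MatrixXd &cov) {
    Eigen::MatrixXd L = cov.llt().matrixL();
    sigL_.push_back(L);
  }
};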
void train(net_t *net, int n, nfloat_t *set, int rows_n, int image_size, int output_size) {
    int i = 0, j = 0, r = 0;
    int input_size = image_size * image_size;
    nfloat_t *input, *output;
    int *hash;

    input = (nfloat_t *) malloc(input_size * sizeof(nfloat_t));
    hash = (int *) malloc(rows_n * sizeof(int));

    for (i = 0; i < rows_n; ++i)
        hash[i] = i;

    /* Shuffle the presentation order with random transpositions. */
    for (i = 0; i < rows_n; ++i) {
        r = rand_(0, rows_n - 1);
        j = hash[i];
        hash[i] = hash[r];
        hash[r] = j;
    }

    for (i = 0; i < rows_n; ++i) {
        r = hash[i];

        /* Copy the input image of the r-th row into the scratch buffer. */
        memcpy(input, set + r * (input_size + output_size), input_size * sizeof(nfloat_t));

        /* Noise: add a random offset in [0.1, 0.3] to every pixel and clamp to [-1, 1]. */
        for (j = 0; j < input_size; ++j) {
            input[j] += ((nfloat_t) rand() / RAND_MAX) * 0.2 + 0.1;
            if (input[j] < -1) input[j] = -1;
            if (input[j] > 1) input[j] = 1;
        }

        /* Overwrite a handful of random pixels with +/-1. */
        for (j = 0; j < rand_(0, 5); ++j)
            input[rand_(0, input_size - 1)] = rand_(0, 1) == 0 ? 1.0 : -1.0;

        /* The expected output follows the input image in the same row. */
        output = set + r * (input_size + output_size) + input_size;

        net_learn(net, n, input, output);
    }

    free(input);
    free(hash);
}
bool MonteCarlo::do_accept_or_reject_move(double score, double last,
                                          double proposal_ratio) {
  bool ok = false;
  if (score < last) {
    ++stat_downward_steps_taken_;
    ok = true;
    if (score < best_energy_ && return_best_) {
      best_ = new Configuration(get_model());
      best_energy_ = score;
    }
  } else {
    double diff = score - last;
    double e = std::exp(-diff / temp_);
    double r = rand_(random_number_generator);
    IMP_LOG_VERBOSE(diff << " " << temp_ << " " << e << " " << r << std::endl);
    if (e * proposal_ratio > r) {
      ++stat_upward_steps_taken_;
      ok = true;
    } else {
      ok = false;
    }
  }

  if (ok) {
    IMP_LOG_TERSE("Accept: " << score << " previous score was " << last
                             << std::endl);
    last_energy_ = score;
    update_states();
    for (int i = get_number_of_movers() - 1; i >= 0; --i) {
      get_mover(i)->accept();
    }
    return true;
  } else {
    IMP_LOG_TERSE("Reject: " << score << " current score stays " << last
                             << std::endl);
    for (int i = get_number_of_movers() - 1; i >= 0; --i) {
      get_mover(i)->reject();
    }
    ++stat_num_failures_;
    if (isf_) {
      isf_->reset_moved_particles();
    }
    return false;
  }
}
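The branch structure above is the standard Metropolis-Hastings acceptance test; a stripped-down, self-contained sketch of just that criterion (the names are hypothetical and not part of the IMP API):

#include <cmath>
#include <random>

// Accept a move from energy `last` to `score` at temperature `temp`;
// `proposal_ratio` is the Hastings correction for asymmetric proposals.
bool metropolis_accept(double score, double last, double temp,
                       double proposal_ratio, std::mt19937 &rng) {
  if (score < last) return true;  // downhill moves are always accepted
  std::uniform_real_distribution<double> unif(0.0, 1.0);
  return std::exp(-(score - last) / temp) * proposal_ratio > unif(rng);
}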
Random::Random() : mt_(rand_()) {}
int mersenne_twister_u16(int low, int high) {
    std::uniform_int_distribution<> rand_(low, high);
    return static_cast<int>(rand_(mt));
}

float mersenne_twister_f32(float low, float high) {
    std::uniform_real_distribution<> rand_(low, high);
    return static_cast<float>(rand_(mt));
}
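Both helpers read a shared engine mt that is not shown in the snippets; a minimal sketch, assuming a file-scope std::mt19937 seeded once from std::random_device:

#include <random>

// Assumed definition of the engine used by mersenne_twister_u16/_f32 above.
static std::mt19937 mt{std::random_device{}()};

// Example use:
//   int die   = mersenne_twister_u16(1, 6);     // uniform integer in [1, 6]
//   float x01 = mersenne_twister_f32(0.f, 1.f); // uniform float in [0, 1)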
/* P. 704. */
doublereal dgrand_(integer *n)
{
    /* Initialized data */

    static doublereal d__[60] = { .67448975,.47585963,.383771164,.328611323,
            .291142827,.263684322,.242508452,.225567444,.211634166,.199924267,
            .189910758,.181225181,.1736014,.166841909,.160796729,.155349717,
            .150409384,.145902577,.141770033,.137963174,.134441762,.13117215,
            .128125965,.12527909,.122610883,.12010356,.117741707,.115511892,
            .113402349,.11140272,.109503852,.107697617,.105976772,.104334841,
            .102766012,.101265052,.099827234,.098448282,.097124309,.095851778,
            .094627461,.093448407,.092311909,.091215482,.090156838,.089133867,
            .088144619,.087187293,.086260215,.085361834,.084490706,.083645487,
            .082824924,.082027847,.081253162,.080499844,.079766932,.079053527,
            .078358781,.077681899 };

    static doublereal u = 0.f;

    /* System generated locals */
    doublereal ret_val;

    /* Local variables */
    static doublereal a;
    static integer i__;
    static doublereal v, w;
    extern doublereal rand_(integer *);

/* EXCEPT ON THE FIRST CALL GRAND RETURNS A */
/* PSEUDO-RANDOM NUMBER HAVING A GAUSSIAN (I.E. */
/* NORMAL) DISTRIBUTION WITH ZERO MEAN AND UNIT */
/* STANDARD DEVIATION. THUS, THE DENSITY IS F(X) = */
/* EXP(-0.5*X**2)/SQRT(2.0*PI). THE FIRST CALL */
/* INITIALIZES GRAND AND RETURNS ZERO. */
/* THE PARAMETER N IS DUMMY. */
/* GRAND CALLS A FUNCTION RAND, AND IT IS ASSUMED THAT */
/* SUCCESSIVE CALLS TO RAND(0) GIVE INDEPENDENT */
/* PSEUDO-RANDOM NUMBERS DISTRIBUTED UNIFORMLY ON (0, */
/* 1), POSSIBLY INCLUDING 0 (BUT NOT 1). */
/* THE METHOD USED WAS SUGGESTED BY VON NEUMANN, AND */
/* IMPROVED BY FORSYTHE, AHRENS, DIETER AND BRENT. */
/* ON THE AVERAGE THERE ARE 1.37746 CALLS OF RAND FOR */
/* EACH CALL OF GRAND. */
/* WARNING - DIMENSION AND DATA STATEMENTS BELOW ARE */
/* MACHINE-DEPENDENT. */
/* DIMENSION OF D MUST BE AT LEAST THE NUMBER OF BITS */
/* IN THE FRACTION OF A FLOATING-POINT NUMBER. */
/* THUS, ON MOST MACHINES THE DATA STATEMENT BELOW */
/* CAN BE TRUNCATED. */
/* IF THE INTEGRAL OF SQRT(2.0/PI)*EXP(-0.5*X**2) FROM */
/* A(I) TO INFINITY IS 2**(-I), THEN D(I) = A(I) - */
/* A(I-1). */

/* END OF MACHINE-DEPENDENT STATEMENTS */

/* U MUST BE PRESERVED BETWEEN CALLS. */

/* INITIALIZE DISPLACEMENT A AND COUNTER I. */
    a = 0.f;
    i__ = 0;

/* INCREMENT COUNTER AND DISPLACEMENT IF LEADING BIT */
/* OF U IS ONE. */
L10:
    u += u;
    if (u < 1.f) {
        goto L20;
    }
    u += -1.f;
    ++i__;
    a -= d__[i__ - 1];
    goto L10;

/* FORM W UNIFORM ON 0 .LE. W .LT. D(I+1) FROM U. */
L20:
    w = d__[i__] * u;

/* FORM V = 0.5*((W-A)**2 - A**2). NOTE THAT 0 .LE. V */
/* .LT. LOG(2). */
    v = w * (w * .5f - a);

/* GENERATE NEW UNIFORM U. */
L30:
    u = rand_(&c__0);

/* ACCEPT W AS A RANDOM SAMPLE IF V .LE. U. */
    if (v <= u) {
        goto L40;
    }

/* GENERATE RANDOM V. */
    v = rand_(&c__0);

/* LOOP IF U .GT. V. */
    if (u > v) {
        goto L30;
    }

/* REJECT W AND FORM A NEW UNIFORM U FROM V AND U. */
    u = (v - u) / (1.f - u);
    goto L20;

/* FORM NEW U (TO BE USED ON NEXT CALL) FROM U AND V. */
L40:
    u = (u - v) / (1.f - v);

/* USE FIRST BIT OF U FOR SIGN, RETURN NORMAL VARIATE. */
    u += u;
    if (u < 1.f) {
        goto L50;
    }
    u += -1.f;
    ret_val = w - a;
    return ret_val;

L50:
    ret_val = a - w;
    return ret_val;
} /* dgrand_ */
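The translated routine relies on an f2c-style constant c__0 and an external uniform generator rand_, neither of which appears in the snippet; a hedged sketch of the declarations it assumes (the typedefs follow the usual f2c.h conventions, and the generator body is an illustrative stand-in):

#include <cstdlib>

// Hypothetical declarations the f2c output above relies on; they normally sit
// at the top of the translated file.
typedef long int integer;
typedef double doublereal;

static integer c__0 = 0;  // the constant passed as rand_(&c__0), i.e. RAND(0)

// Assumed uniform generator on [0, 1); the original links against a Fortran RAND.
doublereal rand_(integer * /*dummy*/) {
    return std::rand() / (static_cast<doublereal>(RAND_MAX) + 1.0);
}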
Storage* get_random() { return storages_[rand_(storages_.size())]; }