// Draw one sample from the bounded power-law pdf p(a) = C * a^(-alfa) on [a0, a1],
// using the inverse-transform (CDF inversion) method for probabilities.
// See e.g. Numerical Recipes 3rd ed., p.362 (although the explanation there sucks).
// NOTE(review): assumes alfa != 1 — at alfa == 1 the exponent 1/(1-alfa) divides
// by zero; confirm callers never pass exactly 1.
NxReal DrawFromBoundedPowerLaw( float a0, float a1, float alfa )
{
    const float u            = gRNG.doub();   // uniform deviate in [0,1)
    const float oneMinusAlfa = 1 - alfa;
    const float p0           = NxMath::pow( a0, oneMinusAlfa );
    const float p1           = NxMath::pow( a1, oneMinusAlfa );
    // Invert the CDF: a = [ u*(a1^(1-alfa) - a0^(1-alfa)) + a0^(1-alfa) ]^(1/(1-alfa))
    return NxMath::pow( u * ( p1 - p0 ) + p0, 1.0f / oneMinusAlfa );
}
/* Utility method to boot-strap the current node set size.
 * Randomly selects 80% of nodes from the current node set (without replacement),
 * marks the unselected entries of `keys` with -1, invalidates every edge incident
 * to an unselected node, and hands the reduced graph to reader.enumerateNodeList().
 * Assumes `keys` is pre-sized by the caller to at least Cn.size() — TODO confirm. */
void getSubSample( vector<int> &keys )
{
    vector<node> Cn;
    vector<edge> Celist;

    // Clone the working node set; c == -1 marks "not yet sampled".
    // NOTE(review): iteration starts at 1 — node storage appears to be 1-indexed,
    // so Gn[0] is deliberately skipped; verify against how Gn is populated.
    for(int i=1; i<(int)Gn.size(); i++)
    {
        Gn[i].c = -1;
        Cn.push_back(Gn[i].Clone());
    }
    for(int i=0; i<(int)Gelist.size(); i++)
    {
        Celist.push_back(Gelist[i].Clone());
    }

    vector<int> rmKeys;
    vector<int> rmEdgeKeys;   // currently unused; kept for parity with original code
    const int N = (int)Cn.size();
    int sample_size = 0;
    const double subN = 0.8;  // set to sample 80% of the current node set size.

    // Generate a sub-sample using the bootstrapped method: draw random indices
    // until 80% of the nodes are marked selected (c == 1).
    // BUGFIX: the index must lie in [0, N-1]. The previous code scaled the
    // deviate by (N+1), which could yield rnd_ind == N and read past the end
    // of Cn (undefined behavior).
    while( sample_size < N*subN )
    {
        int rnd_ind = (int)floor( _rand.doub() * N );
        if( rnd_ind >= N ) rnd_ind = N - 1;   // guard in case doub() ever returns 1.0
        if( Cn[rnd_ind].c == -1 )
        {
            Cn[rnd_ind].c = 1;
            sample_size++;
        }
    }

    // Record the nodes that were NOT selected: flag them in the caller's key
    // map and remember their keys so their edges can be invalidated below.
    for(int i=0; i<(int)Cn.size(); i++)
    {
        if( Cn[i].c == -1 )
        {
            keys[i] = -1;
            rmKeys.push_back(Cn[i].k);
        }
    }

    // Invalidate every edge touching a removed node. Matching either endpoint
    // (source `so` or sink `si`) zeroes out both, so one combined pass is
    // equivalent to the original two separate passes.
    for(int i=0; i<(int)rmKeys.size(); i++)
    {
        for(int k=0; k<(int)Celist.size(); k++)
        {
            if( Celist[k].so == rmKeys[i] || Celist[k].si == rmKeys[i] )
            {
                Celist[k].so = -1;
                Celist[k].si = -1;
            }
        }
    }

    // Publish the reduced node/edge lists, then hand the local buffers off to
    // the shared empties (swap leaves Cn/Celist empty before they go out of scope).
    reader.enumerateNodeList(Cn, Celist, n, elist);
    Cn.swap(emptyn);
    Celist.swap(emptye);
}