// Populates this account object from one LDAP search result entry.
// Returns false when the entry carries no usable uidNumber (i.e. it is not a
// real account); otherwise loads every tracked attribute and returns true.
bool y::ldap::account::load(const data& d) {
  _uidNumber.readFromLdap(d);
  // uidNumber of 0 means the attribute was absent or empty: skip this entry.
  if(_uidNumber().get() == 0) { return false; }

  // Remember whether the entry already carries the schoolPerson objectClass.
  for(int i = 0; i < d.elms("objectClass"); i++) {
    if(d.getValue("objectClass", i) == "schoolPerson") {
      _hasSchoolPersonClass = true;
      break;
    }
  }

  // Pull every mapped attribute out of the LDAP entry.
  _uid           .readFromLdap(d);
  _dn            .readFromLdap(d);
  _cn            .readFromLdap(d);
  _sn            .readFromLdap(d);
  _fullName      .readFromLdap(d);
  _homeDir       .readFromLdap(d);
  _wisaID        .readFromLdap(d);
  _wisaName      .readFromLdap(d);
  _mail          .readFromLdap(d);
  _mailAlias     .readFromLdap(d);
  _birthDay      .readFromLdap(d);
  _password      .readFromLdap(d);
  _role          .readFromLdap(d);
  _groupID       .readFromLdap(d);
  _schoolClass   .readFromLdap(d);
  _classChange   .readFromLdap(d);
  _birthPlace    .readFromLdap(d);
  _gender        .readFromLdap(d);
  _adminGroup    .readFromLdap(d);
  _registerID    .readFromLdap(d);
  _nationality   .readFromLdap(d);
  _stemID        .readFromLdap(d);
  _schoolID      .readFromLdap(d);
  _houseNumber   .readFromLdap(d);
  _houseNumberAdd.readFromLdap(d);
  _city          .readFromLdap(d);
  _postalCode    .readFromLdap(d);
  _street        .readFromLdap(d);
  _country       .readFromLdap(d);

  // A missing role is tolerated but logged for the administrator.
  if(_role().get() == ROLE::NONE) {
    y::utils::Log().add("a user exists without a valid schoolrole: ");
    y::utils::Log().add(_dn().get());
  }

  if(d.getValue("krbName" ).size()) _hasKrbName = true;

  // TODO appears to be a project marker macro, not a call with side effects.
  TODO(this can be removed later)
  // Legacy migration: replace the placeholder full name with "<cn> <sn>".
  if(_fullName().get() == "System User") {
    string fn = _cn().get();
    fn += " ";
    fn += _sn().get();
    _fullName(FULL_NAME(fn), false);
  }

  // Mark the account as loaded-from-LDAP (not newly created).
  // NOTE(review): `return !_new` after `_new = false` always yields true --
  // presumably intentional shorthand for "load succeeded"; confirm.
  _new = false;
  return !_new;
}
// Lexicographic "less than" over the records' names.
bool operator< (const data& d1, const data& d2) {
  const int order = strcmp(d1.getName(), d2.getName());
  return order < 0;
}
// Copy constructor: deep-copies the four C-string fields of aData into
// freshly new[]-allocated buffers owned by this object.
// NOTE(review): the accessors copy into a fixed 100-byte stack buffer with no
// length parameter, so any field of 100+ characters would overflow it. The
// copy-out accessor interface forces this; confirm upstream that fields are
// bounded, or add length-aware getters.
data::data(const data & aData) {
    // Duplicate a NUL-terminated string into a new[]-allocated buffer.
    auto dup = [](const char* src) {
        char* dst = new char[strlen(src) + 1];
        strcpy(dst, src);
        return dst;
    };

    char temp[100];  // single scratch buffer, reused for each field

    aData.getName(temp);
    this->name = dup(temp);

    aData.getPhone(temp);
    this->phone = dup(temp);

    aData.getProduct(temp);
    this->product = dup(temp);

    aData.getEvents(temp);
    this->events = dup(temp);
}
// Serializes (saves or loads) the split-history state.
// The CreateNVP order defines the archive layout; do not reorder.
void Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  ar & CreateNVP(normalNodeMaxNumChildren, "normalNodeMaxNumChildren");
  ar & CreateNVP(splitHistory, "splitHistory");
}
// Copy-constructs a record by pulling each field from h's accessors.
// h is taken by non-const reference because the accessors are not const.
data::data(data& h) {
  address   = h.getAddress();
  features  = h.getFeatures();
  sqFeet    = h.getSqFeet();
  bedrooms  = h.getBedrooms();
  bathrooms = h.getBathrooms();
}
// Constructs the model and copies every attribute key/value pair into
// modelData.
AuthorsBooks::AuthorsBooks(data attributes): Model("AuthorsBooks") {
  for (const auto& attribute : attributes) {
    modelData[attribute.first] = attribute.second;
  }
}
// Serializes (saves or loads) the cached bound/distance state.
// The CreateNVP order defines the archive layout; do not reorder.
void Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  ar & CreateNVP(firstBound, "firstBound");
  ar & CreateNVP(secondBound, "secondBound");
  ar & CreateNVP(bound, "bound");
  ar & CreateNVP(lastDistance, "lastDistance");
}
// Serializes (saves or loads) the DrusillaSelect model state.
// The CreateNVP order defines the archive layout; do not reorder.
void DrusillaSelect<MatType>::Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  ar & CreateNVP(candidateSet, "candidateSet");
  ar & CreateNVP(candidateIndices, "candidateIndices");
  ar & CreateNVP(l, "l");  // number of projections
  ar & CreateNVP(m, "m");  // number of elements per projection
}
// Serializes (saves or loads) the EM fitting configuration and its helper
// objects. The CreateNVP order defines the archive layout; do not reorder.
void EMFit<InitialClusteringType, CovarianceConstraintPolicy>::Serialize(
    Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  ar & CreateNVP(maxIterations, "maxIterations");
  ar & CreateNVP(tolerance, "tolerance");
  ar & CreateNVP(clusterer, "clusterer");
  ar & CreateNVP(constraint, "constraint");
}
// Serializes (saves or loads) the Gaussian distribution parameters, including
// the cached factorizations. The CreateNVP order defines the archive layout.
void Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  // We just need to serialize each of the members.
  ar & CreateNVP(mean, "mean");
  ar & CreateNVP(covariance, "covariance");
  ar & CreateNVP(covLower, "covLower");    // cached Cholesky factor
  ar & CreateNVP(invCov, "invCov");        // cached inverse covariance
  ar & CreateNVP(logDetCov, "logDetCov");  // cached log-determinant
}
// Serializes (saves or loads) the collaborative-filtering model.
// The CreateNVP order defines the archive layout; do not reorder.
void CF::Serialize(Archive& ar, const unsigned int /* version */) {
  // This model is simple; just serialize all the members. No special handling
  // required.
  using data::CreateNVP;
  ar & CreateNVP(numUsersForSimilarity, "numUsersForSimilarity");
  ar & CreateNVP(rank, "rank");
  ar & CreateNVP(w, "w");  // decomposition factors W and H
  ar & CreateNVP(h, "h");
  ar & CreateNVP(cleanedData, "cleanedData");
}
// Lexicographic "less than" over the records' names.
// Fix: the old code tested `strcmp(...) == -1`, but strcmp() only guarantees
// a value less than zero when name1 sorts first -- it may return -2, the raw
// character difference, etc. -- so the ordering silently broke on many
// platforms/inputs. Compare against 0 instead.
bool operator< (const data & aData1, const data & aData2) {
    char name1[100];
    char name2[100];
    aData1.getName(name1);
    aData2.getName(name2);
    return strcmp(name1, name2) < 0;
}
// Serializes (saves or loads) the decision stump.
// The CreateNVP order defines the archive layout; do not reorder.
void DecisionStump<MatType>::Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  // This is straightforward; just serialize all of the members of the class.
  // None need special handling.
  ar & CreateNVP(classes, "classes");
  ar & CreateNVP(bucketSize, "bucketSize");
  ar & CreateNVP(splitDimension, "splitDimension");
  ar & CreateNVP(split, "split");
  ar & CreateNVP(binLabels, "binLabels");
}
// Counts (presumably) unlabeled trees on n nodes using a big-number type:
// f[i] looks like the number of rooted trees on i nodes with every subtree
// of size < n/2, and the final loops glue 2-4 such components around a
// centroid edge/vertex. TODO(review): confirm against the original problem;
// n, i, u, v, w, k, f[] and ans are globals declared outside this view, and
// f's type supplies make()/print() and overloaded arithmetic.
int main(){
    scanf("%d",&n);
    if(n==1){ printf("1\n"); return 0; }
    f[1].make(1);
    // Build f[i] for every i with 2*i <= n.
    for(i=2;i<<1<=n;i++){
        f[i]=f[i-1];
        // Root plus two subtrees of sizes u+v = i-1; equal sizes use the
        // multiset count C(f[u]+1, 2) to avoid double counting.
        for(u=1;u<<1<i;u++){
            v=i-u-1;
            if(u==v)f[i]=f[i]+f[u]*(f[u]+1)/2;
            else f[i]=f[i]+f[u]*f[v];
        }
        // Root plus three subtrees u <= v <= w with u+v+w = i-1; the equal-size
        // cases use multiset-coefficient formulas.
        for(u=1;u*3<i;u++)
            for(v=u;v*2<i-u;v++){
                w=i-1-u-v;
                if(u==v && v==w)f[i]=f[i]+f[u]*(f[u]+1)*(f[u]+2)/6;
                if(u==v && v<w)f[i]=f[i]+f[u]*(f[u]+1)/2*f[w];
                if(u<v && v==w)f[i]=f[i]+f[v]*(f[v]+1)/2*f[u];
                if(u<v && v<w)f[i]=f[i]+f[u]*f[v]*f[w];
            }
    }
    ans.make(0);
    // Two components of exactly n/2 nodes joined by the centroid edge.
    ans=ans+f[n>>1]*(f[n>>1]+1)/2;
    // Centroid vertex with three components, each strictly smaller than n/2.
    for(u=1;u*3<n;u++)
        for(v=u;v*2<n-u;v++){
            w=n-1-u-v;
            if(w<<1>=n)continue;
            if(u==v && v==w)ans=ans+f[u]*(f[u]+1)*(f[u]+2)/6;
            if(u==v && v<w)ans=ans+f[u]*(f[u]+1)/2*f[w];
            if(u<v && v==w)ans=ans+f[v]*(f[v]+1)/2*f[u];
            if(u<v && v<w)ans=ans+f[u]*f[v]*f[w];
        }
    // Centroid vertex with four components u <= v <= w <= k, each < n/2.
    for(u=1;u*4<n;u++)
        for(v=u;v*3<n-u;v++)
            for(w=v;w*2<n-u-v;w++){
                k=n-1-u-v-w;
                if(k<<1>=n)continue;
                if(u==v && v==w && w==k)ans=ans+f[u]*(f[u]+1)*(f[u]+2)*(f[u]+3)/24;
                if(u==v && v==w && w<k)ans=ans+f[u]*(f[u]+1)*(f[u]+2)/6*f[k];
                if(u<v && v==w && w==k)ans=ans+f[v]*(f[v]+1)*(f[v]+2)/6*f[u];
                if(u==v && v<w && w<k)ans=ans+f[u]*(f[u]+1)/2*f[w]*f[k];
                if(u<v && v==w && w<k)ans=ans+f[v]*(f[v]+1)/2*f[u]*f[k];
                if(u<v && v<w && w==k)ans=ans+f[w]*(f[w]+1)/2*f[u]*f[v];
                if(u==v && v<w && w==k)ans=ans+(f[u]*(f[u]+1)/2)*(f[w]*(f[w]+1)/2);
                if(u<v && v<w && w<k)ans=ans+f[u]*f[v]*f[w]*f[k];
            }
    ans.print();
    return 0;
}
// Builds an animator that advances the system by one time step using a
// fast-projection scheme: `steps` successive bilateral-constraint solves,
// each followed by an engine correction.
// Fixes:
//  * `task.lambda = λ` was a mangled HTML entity; restored to taking the
//    address of the local lambda vector, `&lambda`.
//  * `steps` was captured by reference although it is a const-reference
//    parameter -- the returned lambda outlives this call, so the capture
//    dangled. It is now captured by value.
// NOTE(review): `d` is still captured by reference; the caller must keep it
// alive for the lifetime of the returned animator.
animator_type fast_proj(const data& d, const solver_type& solver, const force_type& f, const math::natural& steps) {
    return [&d, solver, f, steps](math::real t, math::real dt) {
        // projections
        dof::velocity last;
        dof::velocity v;

        phys::solver::task task(v);
        phys::constraint::bilateral::vector lambda;
        task.lambda = &lambda;

        task.momentum = d.momentum( f(t, dt), dt );

        for(math::natural i = 0; i < steps; ++i) {
            // generates solver and evaluates constraints
            auto s = solver();

            // constraint stabilization: corrections plus current drift.
            task.bilateral = (1/dt) * d.system.constraint.bilateral.corrections
                             + d.system.constraint.bilateral.matrix * v;
            s.solve( task );

            // muhahahaha
            d.engine.correct( (0.5 * dt) * (v - last) );

            task.momentum = d.system.mass * v;
            last = v;
        }

        d.engine.set(v, d.system.mass * v);
    };
}
// Recursively inserts aData into the binary search tree rooted at `root`,
// ordered by name (ties go to the right subtree). The tree stores a pointer
// to aData, so the caller's object must outlive the tree.
void hashTree::insert(treeNode*& root, data& aData) {
  if (!root) {
    root = new treeNode(&aData);
    size++;
    return;
  }

  char newName[100];
  char rootName[100];
  aData.getName(newName);
  root->item->getName(rootName);

  if (strcmp(newName, rootName) < 0)
    insert(root->left, aData);
  else
    insert(root->right, aData);
}
// Serializes (saves or loads) the auxiliary Hilbert value attached to an
// R-tree node.
void HilbertRTreeAuxiliaryInformation<TreeType ,HilbertValueType>::
Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  ar & CreateNVP(hilbertValue, "hilbertValue");
}
// Checks that a data variable wrapped in a pbes_expression round-trips:
// accessors::val() must recover the original expression.
// Fix: removed the unused namespace alias `p` (pbes_system::pbes_expr),
// which was never referenced in the function body.
void test_pbes_expression() {
  variable x1("x1", basic_sort("X"));
  pbes_expression e = x1;
  data_expression x2 = mcrl2::pbes_system::accessors::val(e);
  BOOST_CHECK(x1 == x2);
}
// Constructs the root trainer: sizes the base via the training options, then
// trains one codebook per sub-dimension. (C, child, dim, train and the msg
// helpers come from the enclosing/base class, outside this view.)
// NOTE: the inline comments below belong to the parameters/arguments on the
// same line; the original single-line form had them commenting out the rest.
train_root ( const data& X,               // data points
             const train_options& opt     // training options
           ) :
    root <T>( opt.books,                  // C
              capacity(X.length(), opt),  // J
              split(X.length(), opt.books) // dim
            ) {
    for (size_t c = 0; c < C; c++) {
        msg::head(info, "training codebook ", c);
        child[c] = train(X, dim[c], opt);
        msg::nl(info);
    }
}
// Copies every firm's current state (salary, price, sales, ...) into the log
// object, keyed by firm id.
void firms::write_log(data& _log) {
  for (auto& entry : _firms) {
    const int id = entry.first;
    firm& current = entry.second;
    _log.setfirmsalary(id, current.getsalary());
    _log.setfirmprice(id, current.getprice());
    _log.setfirmsold(id, current.getsold());
    _log.setfirmworkers(id, current.getworkers());
    _log.setfirmmoney(id, current.getmoney());
    _log.setfirmprofit(id, current.getprofit());
    _log.setfirmdesired(id, current.getdesired());
    _log.setfirmstock(id, current.getstock());
    _log.setfirmaction(id, current.getaction());
  }
}
// Prints this segment-tree node to cerr. With all != 0, dumps every node's
// [lt, rt] range and value in preorder (one per line); with all == 0, prints
// only the leaf values, space-separated.
// NOTE(review): missing children are allocated on demand here, so printing
// a sparse tree materializes it as a side effect.
void print(int all) {
    if ( all ) { cerr<<lt<<' '<<rt<<' '; x.print('\n'); }
    // Leaf: no recursion; in compact mode this is where the value is emitted.
    if ( lt == rt ) {
        if ( !all ) x.print(' ');
        return;
    }
    int md = (lt + rt)/2;
    if ( l == NULL ) l = new seg(lt,md);
    if ( r == NULL ) r = new seg(md+1,rt);
    l->print(all);
    r->print(all);
}
// log and write 関数 void log_writeData(clock::time_point time_, const data& data_){ if(is_active()){ ofs << data_.strGPGGA() << std::endl; /* ofs << time_to_hms(time_) << FILE_DEVIDE_CHARS << data_.getUseGPS() << FILE_DEVIDE_CHARS << data_.getPos().x << FILE_DEVIDE_CHARS << data_.getPos().y << FILE_DEVIDE_CHARS << data_.getHeight() << FILE_DEVIDE_CHARS << time_to_hms(data_.getUTC()) << FILE_DEVIDE_CHARS << data_.getFail() << FILE_DEVIDE_CHARS << data_.getErr() << std::endl; */ } }
double kolmogorov(int n, data d1, data d2) { std::sort(d1.begin(), d1.end()); std::sort(d2.begin(), d2.begin()); int d = 0; int i=0,j=0; while (i != n || j != n) { if (d1[i] >= d2[j]) { ++j; } if (d1[i] <= d2[j]) { ++i; } d = std::max(d, abs(i - j)); } return d/sqrt(n); }
// Serializes (saves or loads) the Gaussian mixture model: counts first, then
// each component distribution by name ("dist0", "dist1", ...), then the
// mixture weights. The order defines the archive layout; do not reorder.
void GMM::Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  ar & CreateNVP(gaussians, "gaussians");
  ar & CreateNVP(dimensionality, "dimensionality");

  // Load (or save) the gaussians. Not going to use the default std::vector
  // serialize here because it won't call out correctly to Serialize() for each
  // Gaussian distribution.
  if (Archive::is_loading::value)
    dists.resize(gaussians);  // `gaussians` was loaded just above

  for (size_t i = 0; i < gaussians; ++i) {
    std::ostringstream oss;
    oss << "dist" << i;
    ar & CreateNVP(dists[i], oss.str());
  }

  ar & CreateNVP(weights, "weights");
}
// Reads the problem input file: the first record's `value` field holds the
// number of following (value, weight) records. The header record itself is
// stored at dat[0]; the payload records follow it.
void readfile(data &dat, const string filename) {
  ifstream file(filename.c_str());

  object header;
  file >> header.value;
  file >> header.weight;
  dat.push_back(header);

  const int count = dat[0].value;  // number of remaining records
  for (int i = 0; i < count; ++i) {
    object curr;
    file >> curr.value;
    file >> curr.weight;
    dat.push_back(curr);
  }
}
// Serializes (saves or loads) the range-search model wrapper: the tree type
// and options first, then exactly one of the underlying tree models, chosen
// by treeType. All variants share the archive name "range_search_model".
void RSModel::Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  ar & CreateNVP(treeType, "treeType");
  ar & CreateNVP(randomBasis, "randomBasis");
  ar & CreateNVP(q, "q");

  // This should never happen, but just in case...
  if (Archive::is_loading::value)
    CleanMemory();

  // We'll only need to serialize one of the model objects, based on the type.
  switch (treeType) {
    case KD_TREE:
      ar & CreateNVP(kdTreeRS, "range_search_model");
      break;
    case COVER_TREE:
      ar & CreateNVP(coverTreeRS, "range_search_model");
      break;
    case R_TREE:
      ar & CreateNVP(rTreeRS, "range_search_model");
      break;
    case R_STAR_TREE:
      ar & CreateNVP(rStarTreeRS, "range_search_model");
      break;
    case BALL_TREE:
      ar & CreateNVP(ballTreeRS, "range_search_model");
      break;
    case X_TREE:
      ar & CreateNVP(xTreeRS, "range_search_model");
      break;
  }
}
// Reads up to block_size bytes from fh into d under the work mutex.
// Passing block_size == file_data_loader::nblock is the sentinel for
// "read until EOF" (the counter is then never decremented).
// Returns true when EOF was reached, false when a full block was read.
bool file_data_loader_work(data& d, FILE* fh, size_t block_size) {
  std::lock_guard<std::mutex> lock(work_mutex);

  for (uint8_t byte; block_size != 0; ) {
    if (fread(&byte, 1, 1, fh) == 0)
      return true;  // EOF (or read error) before the block was filled
    d.push_back(byte);
    if (block_size != file_data_loader::nblock)
      --block_size;
  }
  return false;
}
// Synchronously reads this item's current value from the OPC server (from
// the cache or the device, per `source`) into v.
// Throws opc_exception when either the overall Read call or the per-item
// result HRESULT reports failure.
void item::read( data& v, OPCDATASOURCE source ) {
    cotask_holder<HRESULT> itemResult;      // per-item result codes (server-allocated)
    cotask_holder<OPCITEMSTATE> itemState;  // per-item value/quality/timestamp
    HRESULT result = owner_.sync_io()->Read(source, 1, &handle_, itemState.addr(), itemResult.addr());
    if (FAILED(result)) {
        throw opc_exception( result, OLESTR("Read failed") );
    }
    // The call can succeed overall while the single item still failed.
    if (FAILED(itemResult[0])) {
        throw opc_exception( itemResult[0], OLESTR("Read failed") );
    }
    v.set(itemState[0]);
    // Release the VARIANT the server placed into the item state.
    VariantClear(&itemState[0].vDataValue);
}
// Serializes (saves or loads) this tree node and, recursively, its children.
// On load, any existing children (and, for the root, the dataset) are freed
// first; afterwards the children's parent links are repaired and duplicated
// parent nodes -- an artifact of boost::serialization when the tree is
// archived by value -- are severed and deleted. Field order defines the
// archive layout; do not reorder.
void BinarySpaceTree<MetricType, StatisticType, MatType, BoundType, SplitType>::
Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;
  // If we're loading, and we have children, they need to be deleted.
  if (Archive::is_loading::value) {
    if (left)
      delete left;
    if (right)
      delete right;
    if (!parent)
      delete dataset;  // only the root owns the dataset
  }

  ar & CreateNVP(parent, "parent");
  ar & CreateNVP(begin, "begin");
  ar & CreateNVP(count, "count");
  ar & CreateNVP(bound, "bound");
  ar & CreateNVP(stat, "statistic");
  ar & CreateNVP(parentDistance, "parentDistance");
  ar & CreateNVP(furthestDescendantDistance, "furthestDescendantDistance");
  ar & CreateNVP(dataset, "dataset");

  // Save children last; otherwise boost::serialization gets confused.
  ar & CreateNVP(left, "left");
  ar & CreateNVP(right, "right");

  // Due to quirks of boost::serialization, if a tree is saved as an object and
  // not a pointer, the first level of the tree will be duplicated on load.
  // Therefore, if we are the root of the tree, then we need to make sure our
  // children's parent links are correct, and delete the duplicated node if
  // necessary.
  if (Archive::is_loading::value) {
    // Get parents of left and right children, or, NULL, if they don't exist.
    BinarySpaceTree* leftParent = left ? left->Parent() : NULL;
    BinarySpaceTree* rightParent = right ? right->Parent() : NULL;

    // Reassign parent links if necessary.
    if (left && left->Parent() != this)
      left->Parent() = this;
    if (right && right->Parent() != this)
      right->Parent() = this;

    // Do we need to delete the left parent?
    if (leftParent != NULL && leftParent != this) {
      // Sever the duplicate parent's children. Ensure we don't delete the
      // dataset, by faking the duplicated parent's parent (that is, we need to
      // set the parent to something non-NULL; 'this' works).
      leftParent->Parent() = this;
      leftParent->Left() = NULL;
      leftParent->Right() = NULL;
      delete leftParent;
    }

    // Do we need to delete the right parent?
    if (rightParent != NULL && rightParent != this && rightParent != leftParent) {
      // Sever the duplicate parent's children, in the same way as above.
      rightParent->Parent() = this;
      rightParent->Left() = NULL;
      rightParent->Right() = NULL;
      delete rightParent;
    }
  }
}
// Serializes (saves or loads) this rectangle-tree node and, recursively, its
// children. On load, old children and an owned dataset are freed first; on
// save, a root's direct children deliberately write a NULL parent to avoid
// boost::serialization duplicating the root, and on load the root fixes the
// children's parent/ownership links afterwards. Field order defines the
// archive layout; do not reorder.
void RectangleTree<MetricType, StatisticType, MatType, SplitType, DescentType, AuxiliaryInformationType>::
Serialize(Archive& ar, const unsigned int /* version */) {
  using data::CreateNVP;

  // Clean up memory, if necessary.
  if (Archive::is_loading::value) {
    for (size_t i = 0; i < numChildren; i++)
      delete children[i];
    children.clear();

    if (ownsDataset && dataset)
      delete dataset;
  }

  ar & CreateNVP(maxNumChildren, "maxNumChildren");
  ar & CreateNVP(minNumChildren, "minNumChildren");
  ar & CreateNVP(numChildren, "numChildren");

  // Due to quirks of boost::serialization, depending on how the user serializes
  // the tree, the root node may be duplicated. Therefore we don't allow
  // children of the root to serialize the parent, and we fix the parent link
  // after serializing the children when loading below.
  if (Archive::is_saving::value && parent != NULL && parent->Parent() == NULL) {
    RectangleTree* fakeParent = NULL;
    ar & CreateNVP(fakeParent, "parent");
  } else {
    ar & CreateNVP(parent, "parent");
  }

  ar & CreateNVP(begin, "begin");
  ar & CreateNVP(count, "count");
  ar & CreateNVP(numDescendants, "numDescendants");
  ar & CreateNVP(maxLeafSize, "maxLeafSize");
  ar & CreateNVP(minLeafSize, "minLeafSize");
  ar & CreateNVP(bound, "bound");
  ar & CreateNVP(stat, "stat");
  ar & CreateNVP(parentDistance, "parentDistance");
  ar & CreateNVP(dataset, "dataset");

  // If we are loading and we are the root, we own the dataset.
  if (Archive::is_loading::value && parent == NULL)
    ownsDataset = true;

  ar & CreateNVP(points, "points");
  ar & CreateNVP(auxiliaryInfo, "auxiliaryInfo");

  // Because 'children' holds mlpack types (that have Serialize()), we can't use
  // the std::vector serialization.
  if (Archive::is_loading::value)
    children.resize(numChildren);
  for (size_t i = 0; i < numChildren; ++i) {
    std::ostringstream oss;
    oss << "child" << i;
    ar & CreateNVP(children[i], oss.str());
  }

  // Fix the parent links for the children, if necessary.
  if (Archive::is_loading::value && parent == NULL) {
    // Look through each child individually.
    for (size_t i = 0; i < children.size(); ++i) {
      children[i]->ownsDataset = false;  // only the root owns the dataset
      children[i]->Parent() = this;
    }
  }
}