string TH_0x01::makeStringFromData(const data_t& data, const options_t& options)
{
    (void)options;
    string str;
    size_t byteCount = data.size();

    // Fixed: the element count was previously read from data[0]/data[1]
    // BEFORE any size check, which is out-of-bounds for arrays shorter
    // than 2 bytes.
    if (byteCount < 2 + TH_0x00::dataByteCount)
    {
        std::cerr << "Invalid data array. Needs to contain 2+" + to_string(TH_0x00::dataByteCount) + "*n bytes" << endl;
        return "";
    }

    // Little-endian 16-bit element count stored in the first two bytes.
    size_t numCount = (size_t) ((data[0] & 0xFF) + ((data[1] << 8) & 0xFF00));
    if (((byteCount - 2) % TH_0x00::dataByteCount != 0)
        || (numCount != (size_t)((byteCount - 2) / TH_0x00::dataByteCount))
        || numCount > 999)
    {
        std::cerr << "Invalid data array. Needs to contain 2+" + to_string(TH_0x00::dataByteCount) + "*n bytes" << endl;
        return "";
    }

    // Render each TH_0x00 real as one element of a "{a,b,...}" list.
    str = "{";
    for (size_t i = 2, num = 0; i < byteCount; i += TH_0x00::dataByteCount, num++)
    {
        str += TH_0x00::makeStringFromData(data_t(data.begin() + i, data.begin() + i + TH_0x00::dataByteCount));
        if (num < numCount - 1) // not last num
        {
            str += ',';
        }
    }
    str += "}";

    return str;
}
// Finds the entry following `key` in order (successor scan). On success the
// successor's full key and its value are written to the out-params and true
// is returned; false means no entry after `key` exists in this subtree.
bool bin_index_t::dir_node::next(data_t& key,data_t& val) const
{
    validate_key_len(key);
    validate_index();
    // Split the key: the first parent.dir_key_len bytes select a child index
    // entry ("head"); the remainder ("tail") is resolved inside that child.
    data_t head(key.begin(),key.begin()+parent.dir_key_len);
    data_t tail(key.begin()+parent.dir_key_len,key.end());
    datas_t::const_iterator it=std::lower_bound(indexes.begin(),indexes.end(),head,data_less_pr());
    if(it==indexes.end())return false;
    if(*it==head)
    {
        // Exact prefix match: try to advance within the matching child first.
        validate_sub(head);
        if(sub_node->next(tail,val))
        {
            key=head;
            key.insert(key.end(),tail.begin(),tail.end());
            return true;
        }
    }
    // Child exhausted (or prefix absent): move to the next prefix and take its
    // first entry. NOTE(review): when *it > head (prefix not present) this ++it
    // skips *it's own subtree -- correct only if `key` is always an existing
    // entry; confirm against callers.
    ++it;
    if(it==indexes.end())return false;
    head=*it;
    validate_sub(head);
    if(!sub_node->first(tail,val))return false;
    key=head;
    key.insert(key.end(),tail.begin(),tail.end());
    return true;
}
// Population variance of component `split` across all points in `dataset`.
// Returns 0.0 for an empty dataset (previously divided by zero -> NaN).
double variation(data_t &dataset, size_t split){
    if (dataset.empty())
        return 0.0;

    double mean = 0;
    for (data_t::iterator i = dataset.begin(); i != dataset.end(); ++i){
        mean += i->at(split);
    }
    mean /= dataset.size();

    // Renamed from `variation` to avoid shadowing the function's own name.
    double var = 0;
    for (data_t::iterator i = dataset.begin(); i != dataset.end(); ++i){
        var += pow(i->at(split) - mean, 2.0);
    }
    var /= dataset.size();
    return var;
}
/////////////////////////////////////////////////////////////////////////////////// // dir_node // bool bin_index_t::dir_node::get(const data_t& key,data_t& val) const { validate_key_len(key); validate_index(); data_t head(key.begin(),key.begin()+parent.dir_key_len); data_t tail(key.begin()+parent.dir_key_len,key.end()); if(!std::binary_search(indexes.begin(),indexes.end(),head,data_less_pr())) return false; validate_sub(head); return sub_node->get(tail,val); }
void CTCPProfile::Compress(const data_t &data, data_t &output) { size_t outputInSize = output.size(); const iphdr* ip = reinterpret_cast<const iphdr*>(&data[0]); const tcphdr* tcp = reinterpret_cast<const tcphdr*>(ip+ip->ihl*4); UpdateIpIdOffset(ip); if (IR_State == state) { CreateIR(ip, tcp, output); } else { CreateCO(ip, tcp, output); } UpdateIpInformation(ip); AdvanceState(false, false); increaseMsn(); // Append payload // TODO, handle TCP options output.insert(output.end(), data.begin() + sizeof(iphdr) + sizeof(tcphdr), data.end()); ++numberOfPacketsSent; dataSizeCompressed += output.size() - outputInSize; dataSizeUncompressed += data.size(); }
bool bin_index_t::dir_node::set(const data_t& key,const data_t& val) { validate_key_len(key); validate_index(); data_t head(key.begin(),key.begin()+parent.dir_key_len); data_t tail(key.begin()+parent.dir_key_len,key.end()); bool already_exists=std::binary_search(indexes.begin(),indexes.end(),head,data_less_pr()); if(!already_exists)create_text_child(head); validate_sub(head); bool r=sub_node->set(tail,val); return r; }
// Formats a complex-fraction variable as "R+Ii" (e.g. "1/2-3/4i").
// Throws invalid_argument when `data` is not exactly dataByteCount bytes.
string TH_0x1B::makeStringFromData(const data_t& data, const options_t& options)
{
    (void)options;

    // Fixed error message: the old text claimed "Empty data array" even when
    // the array was merely the wrong size.
    if (data.size() != dataByteCount)
    {
        throw invalid_argument("Invalid data array. Needs to contain exactly " + to_string(dataByteCount) + " bytes");
    }

    // The value is two consecutive TH_0x00 reals: real part, then imaginary part.
    string coeffR = TH_0x00::makeStringFromData(data_t(data.begin(), data.begin() + TH_0x00::dataByteCount));
    string coeffI = TH_0x00::makeStringFromData(data_t(data.begin() + TH_0x00::dataByteCount, data.begin() + 2 * TH_0x00::dataByteCount));

    // Render as "R+Ii", then collapse "+-" into "-" for negative imaginary parts.
    string str = dec2frac(atof(coeffR.c_str())) + "+" + dec2frac(atof(coeffI.c_str())) + "i";
    str = regex_replace(str, regex("\\+-"), "-");

    return str;
}
// Serializes the session map into `s`: for each entry, a `packed` header
// (key length, exposed flag, value length) followed by the raw key bytes
// and the raw value bytes.
void session_interface::save_data(data_t const &data,std::string &s)
{
    s.clear();
    for(data_t::const_iterator it = data.begin(); it != data.end(); ++it)
    {
        packed header(it->first.size(), it->second.exposed, it->second.value.size());
        const char *raw = (const char *)&header;
        s.append(raw, raw + sizeof(header));
        s.append(it->first.begin(), it->first.end());
        s.append(it->second.value.begin(), it->second.value.end());
    }
}
string TH_0x02::makeStringFromData(const data_t& data, const options_t& options)
{
    (void)options;
    size_t byteCount = data.size();

    // Fixed: the matrix dimensions were previously read from data[0]/data[1]
    // BEFORE any size check, which is out-of-bounds for arrays shorter than
    // 2 bytes.
    if (byteCount < 2 + TH_0x00::dataByteCount)
    {
        std::cerr << "Invalid data array. Needs to contain 1+1+" << TH_0x00::dataByteCount << "*n bytes" << std::endl;
        return "";
    }

    // Byte 0 is the column count, byte 1 the row count.
    size_t colCount = data[0];
    size_t rowCount = data[1];
    if (colCount < 1 || rowCount < 1 || colCount > 255 || rowCount > 255
        || ((byteCount - 2) % TH_0x00::dataByteCount != 0)
        || (colCount * rowCount != (byteCount - 2) / TH_0x00::dataByteCount))
    {
        std::cerr << "Invalid data array. Needs to contain 1+1+" << TH_0x00::dataByteCount << "*n bytes" << std::endl;
        return "";
    }

    // Render row-major as "[[a,b][c,d]]".
    string str = "[";
    for (size_t i = 2, num = 0; i < byteCount; i += TH_0x00::dataByteCount, num++)
    {
        if (num % colCount == 0) // first column
        {
            str += "[";
        }
        str += TH_0x00::makeStringFromData(data_t(data.begin() + i, data.begin() + i + TH_0x00::dataByteCount));
        if (num % colCount < colCount - 1) // not last column
        {
            str += ",";
        }
        else
        {
            str += "]";
        }
    }
    str += "]";

    // TODO: prettified option

    return str;
}
// Recursively builds a kd-tree node from `dataset`: splits on the dimension
// with the largest variance, stores the median point in this node, and
// recurses on the two halves. Leaves `root` untouched for an empty dataset.
void construct(point &root, data_t dataset){
    if (dataset.empty())
        return;

    root = new point_t();

    // Per-dimension variance of the dataset.
    vector< double > variances;
    for (size_t d = 0; d < dataset.at(0).size(); d++){
        variances.push_back(variation(dataset, d));
    }

    // Split on the dimension whose variance is largest.
    size_t split = 0;
    double best = variances.at(0);
    for (size_t d = 1; d < variances.size(); d++){
        if (best < variances.at(d)){
            best = variances.at(d);
            split = d;
        }
    }

    // Order by the split component and take the median as this node's point.
    sort(dataset.begin(), dataset.end(),
         [split](vec_t a, vec_t b){ return a.at(split) < b.at(split); });
    const size_t middle = dataset.size() / 2;

    root->split = split;
    for (size_t d = 0; d < dataset.at(middle).size(); d++){
        root->node.push_back(dataset.at(middle).at(d));
    }
    root->dim = root->node.size();

    // Everything left of the median goes to the left subtree, everything
    // right of it to the right subtree (the median itself is consumed here).
    data_t leftPart, rightPart;
    for (size_t idx = 0; idx < middle; idx++){
        leftPart.push_back(dataset.at(idx));
    }
    for (size_t idx = middle + 1; idx < dataset.size(); idx++){
        rightPart.push_back(dataset.at(idx));
    }
    construct(root->left, leftPart);
    construct(root->right, rightPart);
    if (root->left) root->left->parent = root;
    if (root->right) root->right->parent = root;
}
//min-max splits based on impurity measure bool impurity_splitW_noMiss(data_t data, int& f_split, double& v_split, double imp, boost::numeric::ublas::vector<int>& c_total,args_t& myargs, double (*impurityHandle)(int, boost::numeric::ublas::vector<int>&,double)) { int NF=myargs.features; int num_c=myargs.num_c; double alpha=myargs.alpha; f_split = -1; double min = MY_DBL_MAX, cf; int n = data.size(), i,j ; double imp_l=0.0, imp_r=0.0, imp_m=0.0, imp_max=0.0; //impurity on the left double mind; double v_split_d; for (int f = 1; f < NF; f++) { //for each feature cf=myargs.Costs[f]; sort(data.begin(), data.end(), boost::bind(mysortf, _1,_2, f)); boost::numeric::ublas::vector<int> c_l(num_c,0); //number of examples in each class on the left boost::numeric::ublas::vector<int> c_r(c_total); //number of examples in each class on the right mind = MY_DBL_MAX; //assume no missing data for( i=0;i<n-1;i++){ c_l[data[i]->label]++; c_r[data[i]->label]--; // do not consider splitting here if data is the same as next if (data[i]->features[f] == data[i+1]->features[f]) continue; imp_l=(*impurityHandle)(num_c, c_l, alpha); imp_r=(*impurityHandle)(num_c, c_r, alpha); imp_max=(imp_l < imp_r) ? imp_r: imp_l; if(imp_max<mind){ mind=imp_max; v_split_d = (data[i]->features[f] + data[i+1]->features[f])/2; } } if ((imp-mind>0.0000001) && (cf/(imp-mind) < min)) { min = cf/(imp-mind); f_split = f; v_split = v_split_d; } } return min != MY_DBL_MAX; }
// Emits an IR (Initialization/Refresh) packet into `output`: optional short
// CID, IRv2 packet type octet, optional SDVL-encoded large CID, profile ID,
// a CRC-8 placeholder, then the IPv4/TCP static and IPv4 dynamic chains.
// The CRC is computed over the finished header and patched in afterwards.
void CTCPProfile::CreateIR(const ROHC::iphdr *ip, const ROHC::tcphdr *tcp, data_t &output)
{
    // Remember where this header starts: the CRC covers everything from here.
    size_t headerStartIdx = output.size();
    
    if (!largeCID && cid)
    {
        output.push_back(CreateShortCID(cid));
    }
    output.push_back(IRv2Packet);
    if (largeCID)
    {
        SDVLEncode(back_inserter(output), cid);
    }
    output.push_back(static_cast<uint8_t>(ProfileID()));
    size_t crcPos = output.size();
    // Add zero crc for now -- patched with the real CRC-8 below.
    output.push_back(0);
    create_ipv4_static(ip, output);
    create_tcp_static(tcp, output);
    create_ipv4_regular_innermost_dynamic(ip, output);
    // NOTE(review): the TCP dynamic chain is disabled -- confirm whether this
    // omission is intentional or work-in-progress.
    //create_tcp_dynamic(msn, reorder_ratio, udp, output);
    // Calculate CRC over the complete header just emitted.
    uint8_t crc = CRC8(output.begin() + headerStartIdx, output.end());
    output[crcPos] = crc;
    IncreasePacketCount(PT_IR);
    ++numberOfIRPacketsSent;
    ++numberOfIRPacketsSinceReset;
}
/** static version */
// Reorders FFTW halfcomplex coefficients into the interleaved layout the
// convolver's multiply stage consumes: one fixed block of eight, then for
// every further group of eight, four ascending entries from the front half
// and four descending entries from the back half.
// NOTE: requires partition_size >= 8 and divisible by 8.
void Convolver::sort_coefficients(data_t& coefficients,
                                  const unsigned int partition_size)
{
  const unsigned int buffer_size = partition_size;
  data_t scratch(buffer_size);

  // Fixed first group of eight.
  scratch[0] = coefficients[0];
  scratch[1] = coefficients[1];
  scratch[2] = coefficients[2];
  scratch[3] = coefficients[3];
  scratch[4] = coefficients[buffer_size / 2];
  scratch[5] = coefficients[buffer_size - 1];
  scratch[6] = coefficients[buffer_size - 2];
  scratch[7] = coefficients[buffer_size - 3];

  // Remaining groups: 4 from the ascending half, 4 mirrored from the end.
  int base = 8;
  for (unsigned int group = 0; group < buffer_size / 8 - 1; ++group)
  {
    for (unsigned int k = 0; k < 4; ++k)
    {
      scratch[base + k] = coefficients[base / 2 + k];
    }
    for (unsigned int k = 0; k < 4; ++k)
    {
      scratch[base + 4 + k] = coefficients[buffer_size - base / 2 - k];
    }
    base += 8;
  }

  std::copy(scratch.begin(), scratch.end(), coefficients.begin());
}
// Copies `key` into res.key and zero-pads it out to aligned_key_len bytes.
// NOTE(review): the zero fill starts at key_len, not key.size() -- this
// presumably assumes key.size() == key_len (validated elsewhere via
// validate_key_len); confirm, since a shorter key would leave bytes
// [key.size(), key_len) unset and a longer one would be truncated by the fill.
void bin_index_t::file_node::align_key(const data_t& key,index_t& res) const
{
    res.key.resize(aligned_key_len);
    std::copy(key.begin(),key.end(),res.key.begin());
    std::fill(res.key.begin()+key_len,res.key.end(),0);
}
// Mutable iterator to the first stored vertex.
iterator begin() { return m_verts.begin(); }
// Read-only iterator to the first stored vertex.
const_iterator begin() const { return m_verts.begin(); }
// Builds the cell from a raw password buffer; the bytes are copied into
// _password and the result buffer starts empty.
// NOTE(review): single-argument constructor is not `explicit`, so data_t
// converts implicitly to secure_cell_t -- consider whether that is intended.
secure_cell_t(const data_t& password): _password(password.begin(), password.end()), _res(0){}
/** This is an autarc function to precalculate the frequency * domain filter partitions that a \b Convolver needs. It does * not require an instantiation of a \b Convolver. However, it is * not very efficient since an FFT plan is created with every call. * @param container place to store the partitions. * @param filter impulse response of the filter * @param filter_size size of the impulse response * @param partition_size size of the partitions (this is the * partition size that the outside world sees, internally it is twice as long) */ void Convolver::prepare_impulse_response(data_t& container, const float *filter, const unsigned int filter_size, const unsigned int partition_size) { // find out how many complete partitions we have unsigned int no_of_partitions = filter_size / partition_size; // if there is even one more if (filter_size % partition_size) no_of_partitions++; // empty container container.clear(); // allocate memory container.resize(2 * no_of_partitions * partition_size, 0.0f); // define temporary buffers data_t fft_buffer; data_t zeros; // allocate memory and initialize to 0 fft_buffer.resize(2 * partition_size, 0.0f); zeros.resize(2 * partition_size, 0.0f); // create fft plans for halfcomplex data format fftwf_plan fft_plan = fftwf_plan_r2r_1d(2 * partition_size, &fft_buffer[0], &fft_buffer[0], FFTW_R2HC, FFTW_ESTIMATE); // convert filter partitionwise to frequency domain /////// process complete partitions ////////////// for (unsigned int partition = 0u; partition < no_of_partitions - 1; partition++) { std::copy(filter + partition * partition_size, filter + (partition + 1) * partition_size, fft_buffer.begin()); // zero pad std::copy(zeros.begin(), zeros.begin() + partition_size, fft_buffer.begin() + partition_size); // fft fftwf_execute(fft_plan); sort_coefficients(fft_buffer, 2 * partition_size); // add the partition to the filter std::copy(fft_buffer.begin(), fft_buffer.begin() + 2 * partition_size, container.begin() + 2 * partition * 
partition_size); } ////// end process complete partitions //// process potentially incomplete last partition //////////// // zeros std::copy(zeros.begin(), zeros.end(), fft_buffer.begin()); // add filter coefficients std::copy(filter + (no_of_partitions - 1) * partition_size, filter + filter_size, fft_buffer.begin()); // fft fftwf_execute(fft_plan); sort_coefficients(fft_buffer, 2 * partition_size); // add the partition to the filter std::copy(fft_buffer.begin(), fft_buffer.end(), container.begin() + 2 * (no_of_partitions - 1) * partition_size); ///// end process potentially incomplete partition //////// // clean up fftwf_destroy_plan(fft_plan); }