// Load a set of MINC volumes (grid type) from the file names listed in s.
// The first `skip` entries of s are not file names (first column is eigenvalue).
// `verbose` is currently unused (the progress print is commented out).
// Throws on read errors (propagated from minc_1_reader / load_simple_volume).
void load_volumes(strings& s,grids& vols,int skip,bool verbose)
{
  // BUGFIX: the old `int i < s.size()-skip` loop mixed signed/unsigned and,
  // when skip >= s.size(), the size_t subtraction wrapped around and
  // requested a gigantic resize.  Compute the element count safely first.
  const size_t count = (skip >= 0 && static_cast<size_t>(skip) < s.size()) ?
                       s.size() - static_cast<size_t>(skip) : 0;
  vols.resize(count); //first column is eigenvalue
  for(size_t i = 0; i < count; i++)
  {
    /*if(verbose) std::cout<<s[i+skip].c_str()<<" "<<std::flush;*/
    minc_1_reader rdr;
    rdr.open(s[i+skip].c_str());
    load_simple_volume(rdr,vols[i]);
  }
}
// Load a set of float MINC volumes from the file names listed in s.
// The first `skip` entries of s are not file names (first column is eigenvalue).
// When ignore_missing is true, unreadable/missing files are silently skipped
// (the corresponding slot keeps its default-constructed volume); otherwise
// the minc::generic_error is re-thrown to the caller.
// `verbose` is currently unused.
void load_volumes(strings& s,volumes& averages,int skip,bool verbose,bool ignore_missing)
{
  // BUGFIX: guard against skip >= s.size(); the old size_t subtraction
  // wrapped around and requested a gigantic resize.  Also use size_t for
  // the index instead of the signed/unsigned `int i < size_t` comparison.
  const size_t count = (skip >= 0 && static_cast<size_t>(skip) < s.size()) ?
                       s.size() - static_cast<size_t>(skip) : 0;
  averages.resize(count); //first column is eigenvalue
  for(size_t i = 0; i < count; i++)
  {
    minc_1_reader rdr;
    try
    {
      rdr.open(s[i+skip].c_str());
      load_simple_volume<float>(rdr,averages[i]);
    }
    catch(const minc::generic_error &) // name dropped: it was unused
    {
      // re-throw the original exception unless the caller opted in to
      // best-effort loading of whatever files exist
      if(!ignore_missing)
        throw;
    }
  }
}
// Save each volume in vols under the corresponding file name in s, using
// `like` as the geometry/metadata template and appending append_history to
// the MINC history.
// NOTE(review): assumes vols.size() >= s.size() -- confirm at call sites.
void save_volumes(strings& s,volumes& vols,const char *like,const std::string & append_history)
{
  // size_t index fixes the signed/unsigned comparison of the old `int i` loop
  for(size_t i = 0; i < s.size(); i++)
  {
    save_volume(vols[i],s[i].c_str(),like,append_history);
  }
}
// Generate one "reverberated" data point: start the network from random
// inputs (optionally concatenated with a context vector drawn from the
// dataset), then repeatedly feed the auto-associative layer's output back
// as input for revparams.numRev activation passes.  Returns the final
// input vector together with the (optionally thresholded) network output.
// NOTE: mutates revparams.numContexts when it was 0 and subView is non-empty.
RevKBCCTrainer::reverbdpoint RevKBCCTrainer::ReverberateNetwork( strings subView /*= strings(0)*/ )
{
    //changing the clean reverb to only threshold the outputs.
    //if the numcontexts is set above 0 then we'll get them from the dataset.
    int inSize = revNet->GetNumIn();
    if (revparams.numContexts > 0 || subView.size() > 0)
    {
        //assert(revparams.numContexts > 0);
        if(revparams.numContexts == 0)
        {
            revparams.numContexts = subView.size();
        }
        // context units are appended below, so exclude them from the random part
        inSize -= revparams.numContexts;
    }
    vecDouble tmpIn(inSize);
    // random initial activation for every non-context input unit
    for (uint i = 0; i < tmpIn.size(); i++)
    {
        tmpIn[i] = revRand();
    }
    if (revparams.numContexts > 0)
    {
        // draw a context vector from the dataset, restricted to subView when given
        vecDouble randContext;
        if (subView.size() > 0)
        {
            randContext = dynamic_pointer_cast<CSMTLDataset>(data)->GetRandContext(subView);
        }
        else
        {
            randContext = dynamic_pointer_cast<CSMTLDataset>(data)->GetRandContext();
        }
        tmpIn = ConcatVec(tmpIn,randContext);
    }
    revNet->Activate(tmpIn);
    // reverberate: feed the auto-assoc layer's output back in; the loop starts
    // at 1 because the Activate above already counts as the first pass
    for (uint i = 1; i < revparams.numRev; i++)
    {
        tmpIn = revNet->GetAutoAssocLayer()->GetOutput();
        revNet->Activate(tmpIn);
    }
    reverbdpoint retVal;
    retVal.inPoint = tmpIn;
    retVal.outPoint = revNet->GetNormOutLayer()->GetOutput();
    if(revparams.cleanReverb)
    {
        //This shouldn't be used when the outputs are real.
        retVal.outPoint = ApplyThreshold(retVal.outPoint);
    }
#ifdef _DEBUG
    cout << "in: " << retVal.inPoint << "out" << retVal.outPoint << endl;
#endif
    return retVal;
}
bool nsUtils::runCmd(const string& cmd, const strings& input, strings& output) { int childStdIn[2]; // child read 0, parent write 1 int childStdOut[2]; // parent read 0, child write 1 if (pipe(childStdIn) != 0) { std::cerr << "Failed to create pipe 1\n"; return false; } if (pipe(childStdOut) != 0) { std::cerr << "Failed to create pipe 2\n"; return false; } pid_t child(fork()); if (child == -1) { std::cerr << "Failed to fork\n"; close(childStdIn[0]); close(childStdIn[1]); close(childStdOut[0]); close(childStdOut[1]); return false; } if (child == 0) { // child close(childStdIn[1]); close(childStdOut[0]); dup2(childStdIn[0], 0); dup2(childStdOut[1], 1); execl(cmd.c_str(), cmd.c_str(), NULL); exit(1); } // parent close(childStdIn[0]); close(childStdOut[1]); char lineEnd('\n'); for (size_t i = 0; i < input.size(); ++i) { write(childStdIn[1], input[i].c_str(), input[i].length()); write(childStdIn[1], &lineEnd, 1); } close(childStdIn[1]); ssize_t readCount; const int bufferSize(512); char buffer[bufferSize + 1]; while ((readCount = read(childStdOut[0], buffer, bufferSize)) > 0) { buffer[readCount] = '\0'; if (buffer[readCount - 1] == '\n') { buffer[readCount - 1] = '\0'; } output.push_back(string(buffer)); } close(childStdOut[0]); wait(NULL); return true; }
void HttpInterface::sendSetVariable(const std::string nodeName, const strings& args) { // get node id, variable position and length if (verbose) cerr << "setVariables " << nodeName << " " << args[0]; unsigned nodePos, varPos; const bool exists(getNodeAndVarPos(nodeName, args[0], nodePos, varPos)); if (!exists) return; if (verbose) cerr << " (" << nodePos << "," << varPos << "):" << args.size()-1 << endl; // send the message SetVariables::VariablesVector data; for (size_t i=1; i<args.size(); ++i) data.push_back(atoi(args[i].c_str())); SetVariables setVariables(nodePos, varPos, data); setVariables.serialize(asebaStream); asebaStream->flush(); }
void Shell::emit(const strings& args) { // check that there are enough arguments if (args.size() < 2) { wcerr << "missing argument, usage: emit EVENT_NAME EVENT_DATA*" << endl; return; } size_t pos; if (!commonDefinitions.events.contains(UTF8ToWString(args[1]), &pos)) { wcerr << "event " << UTF8ToWString(args[1]) << " is unknown" << endl; return; } // build event and emit VariablesDataVector data; for (size_t i=2; i<args.size(); ++i) data.push_back(atoi(args[i].c_str())); UserMessage userMessage(pos, data); userMessage.serialize(targetStream); targetStream->flush(); }
void Shell::setVariable(const strings& args) { // check that there are enough arguments if (args.size() < 4) { wcerr << "missing argument, usage: set NODE_NAME VAR_NAME VAR_DATA+" << endl; return; } // get node id, variable position and length unsigned nodeId, pos; const bool exists(getNodeAndVarPos(args[1], args[2], nodeId, pos)); if (!exists) return; // send the message VariablesDataVector data; for (size_t i=3; i<args.size(); ++i) data.push_back(atoi(args[i].c_str())); SetVariables setVariables(nodeId, pos, data); setVariables.serialize(targetStream); targetStream->flush(); }
// Remove from `files` every entry that is not a supported image type,
// preserving the relative order of the remaining entries (single O(N) pass).
void filterOnlyImages(strings& files)
{
    strings kept;
    for (size_t idx = 0; idx < files.size(); ++idx)
    {
        if (isSupportedImageType(files[idx]))
        {
            kept.push_back(files[idx]);
        }
    }
    files = kept;
}
void HttpInterface::sendEvent(const std::string nodeName, const strings& args) { size_t eventPos; if (commonDefinitions.events.contains(UTF8ToWString(args[0]), &eventPos)) { // build event and emit UserMessage::DataVector data; for (size_t i=1; i<args.size(); ++i) data.push_back(atoi(args[i].c_str())); UserMessage userMessage(eventPos, data); userMessage.serialize(asebaStream); asebaStream->flush(); } else if (verbose) cerr << "sendEvent " << nodeName << ": no event " << args[0] << endl; }
// Subscribe an HTTP connection to event notifications (server-sent events).
// With no event names given, subscribe to everything ("*"); otherwise
// subscribe to each named event individually.
void HttpInterface::evSubscribe(HttpRequest* req, strings& args)
{
    // eventSubscriptions[conn] is an unordered set of strings
    if (args.size() == 1)
    {
        eventSubscriptions[req].insert("*");
    }
    else
    {
        for (size_t argIdx = 1; argIdx < args.size(); ++argIdx)
            eventSubscriptions[req].insert(args[argIdx]);
    }
    // SSE response headers; the connection is kept open for streaming
    strings headers;
    headers.push_back("Content-Type: text/event-stream");
    headers.push_back("Cache-Control: no-cache");
    headers.push_back("Connection: keep-alive");
    addHeaders(req, headers);
    appendResponse(req,200,true,""); // connection must stay open!
}
void Shell::run(const strings& args) { // check arguments if (args.size() != 2) { wcerr << "wrong number of arguments, usage: run NODE_NAME" << endl; return; } bool ok; const unsigned nodeId(getNodeId(UTF8ToWString(args[1]), 0, &ok)); if (!ok) { wcerr << "invalid node name " << UTF8ToWString(args[1]) << endl; return; } // build run message and send Run runMsg(nodeId); runMsg.serialize(targetStream); targetStream->flush(); }
// Format the (possibly negative) big integer _b as a human-readable string
// using the unit names in _units: _units[0] is the base unit and each later
// entry is 1000x the previous one.  The largest fitting unit is chosen and
// printed with 3 significant digits; values above 1000x the largest unit are
// printed as an integer multiple of that unit.
// NOTE(review): assumes _units is non-empty -- an empty vector would make
// `_units.size() - 1` wrap around in the loop below; confirm callers.
string inUnits(bigint const& _b, strings const& _units)
{
    ostringstream ret;
    u256 b;
    if (_b < 0)
    {
        ret << "-";
        b = (u256)-_b; // continue with the magnitude; sign already emitted
    }
    else
        b = (u256)_b;
    // biggest = 1000^(units-1): the scale factor of the largest unit
    u256 biggest = 1;
    for (unsigned i = _units.size() - 1; !!i; --i)
        biggest *= 1000;
    if (b > biggest * 1000)
    {
        // too large even for the largest unit: integer multiple of it
        ret << (b / biggest) << " " << _units.back();
        return ret.str();
    }
    ret << setprecision(3);
    // walk units from largest to smallest until the value is >= 1 unit;
    // the base unit (_units.front()) is handled after the loop
    u256 unit = biggest;
    for (auto it = _units.rbegin(); it != _units.rend(); ++it)
    {
        auto i = *it;
        if (i != _units.front() && b >= unit)
        {
            // divide by unit/1000 first to keep 3 decimal digits of precision
            ret << (double(b / (unit / 1000)) / 1000.0) << " " << i;
            return ret.str();
        }
        else
            unit /= 1000;
    }
    // fell through: print in the base unit
    ret << b << " " << _units.front();
    return ret.str();
}
void Shell::getVariable(const strings& args) { // check that there is the correct number of arguments if (args.size() != 3) { wcerr << "wrong number of arguments, usage: get NODE_NAME VAR_NAME" << endl; return; } // get node id, variable position and length unsigned nodeId, pos; const bool exists(getNodeAndVarPos(args[1], args[2], nodeId, pos)); if (!exists) return; bool ok; const unsigned length(getVariableSize(nodeId, UTF8ToWString(args[2]), &ok)); if (!ok) return; // send the message GetVariables getVariables(nodeId, pos, length); getVariables.serialize(targetStream); targetStream->flush(); }
// Dispatch one incoming eth-capability packet from peer _peerID.
// Returns false only for an unrecognised packet id (so the caller can react);
// returns true otherwise, including when handling threw (the exception is
// logged and the session kept alive).
bool EthereumCapability::interpretCapabilityPacket(
    NodeID const& _peerID, unsigned _id, RLP const& _r)
{
    // note: operator[] default-constructs an entry for a yet-unknown peer
    auto& peer = m_peers[_peerID];
    peer.setLastAsk(std::chrono::system_clock::to_time_t(chrono::system_clock::now()));
    try
    {
        switch (_id)
        {
        case StatusPacket:
        {
            // Handshake: record the peer's chain state and notify the observer
            auto const peerProtocolVersion = _r[0].toInt<unsigned>();
            auto const networkId = _r[1].toInt<u256>();
            auto const totalDifficulty = _r[2].toInt<u256>();
            auto const latestHash = _r[3].toHash<h256>();
            auto const genesisHash = _r[4].toHash<h256>();

            LOG(m_logger) << "Status (from " << _peerID << "): " << peerProtocolVersion << " / "
                          << networkId << " / " << genesisHash << ", TD: " << totalDifficulty
                          << " = " << latestHash;
            peer.setStatus(
                peerProtocolVersion, networkId, totalDifficulty, latestHash, genesisHash);
            setIdle(_peerID);
            m_peerObserver->onPeerStatus(peer);
            break;
        }
        case TransactionsPacket:
        {
            m_peerObserver->onPeerTransactions(_peerID, _r);
            break;
        }
        case GetBlockHeadersPacket:
        {
            /// Packet layout:
            /// [ block: { P , B_32 }, maxHeaders: P, skip: P, reverse: P in { 0 , 1 } ]
            const auto blockId = _r[0];
            const auto maxHeaders = _r[1].toInt<u256>();
            const auto skip = _r[2].toInt<u256>();
            const auto reverse = _r[3].toInt<bool>();

            // clamp the request to our own configured send limit
            auto numHeadersToSend = maxHeaders <= c_maxHeadersToSend ?
                static_cast<unsigned>(maxHeaders) : c_maxHeadersToSend;

            // refuse skips that would overflow the unsigned arithmetic below
            if (skip > std::numeric_limits<unsigned>::max() - 1)
            {
                LOG(m_loggerDetail)
                    << "Requested block skip is too big: " << skip << " (peer: " << _peerID << ")";
                break;
            }

            pair<bytes, unsigned> const rlpAndItemCount =
                m_hostData->blockHeaders(blockId, numHeadersToSend, skip, reverse);

            RLPStream s;
            m_host->prep(_peerID, name(), s, BlockHeadersPacket, rlpAndItemCount.second)
                .appendRaw(rlpAndItemCount.first, rlpAndItemCount.second);
            m_host->sealAndSend(_peerID, s);
            m_host->updateRating(_peerID, 0);
            break;
        }
        case BlockHeadersPacket:
        {
            // only accept headers we actually asked for
            if (peer.asking() != Asking::BlockHeaders)
                LOG(m_loggerImpolite)
                    << "Peer " << _peerID << " giving us block headers when we didn't ask for them.";
            else
            {
                setIdle(_peerID);
                m_peerObserver->onPeerBlockHeaders(_peerID, _r);
            }
            break;
        }
        case GetBlockBodiesPacket:
        {
            unsigned count = static_cast<unsigned>(_r.itemCount());
            LOG(m_logger) << "GetBlockBodies (" << dec << count << " entries) from " << _peerID;

            // empty requests are impolite and cost the peer reputation
            if (!count)
            {
                LOG(m_loggerImpolite) << "Zero-entry GetBlockBodies: Not replying to " << _peerID;
                m_host->updateRating(_peerID, -10);
                break;
            }

            pair<bytes, unsigned> const rlpAndItemCount = m_hostData->blockBodies(_r);

            m_host->updateRating(_peerID, 0);
            RLPStream s;
            m_host->prep(_peerID, name(), s, BlockBodiesPacket, rlpAndItemCount.second)
                .appendRaw(rlpAndItemCount.first, rlpAndItemCount.second);
            m_host->sealAndSend(_peerID, s);
            break;
        }
        case BlockBodiesPacket:
        {
            if (peer.asking() != Asking::BlockBodies)
                LOG(m_loggerImpolite)
                    << "Peer " << _peerID << " giving us block bodies when we didn't ask for them.";
            else
            {
                setIdle(_peerID);
                m_peerObserver->onPeerBlockBodies(_peerID, _r);
            }
            break;
        }
        case NewBlockPacket:
        {
            m_peerObserver->onPeerNewBlock(_peerID, _r);
            break;
        }
        case NewBlockHashesPacket:
        {
            unsigned itemCount = _r.itemCount();
            LOG(m_logger) << "BlockHashes (" << dec << itemCount << " entries) "
                          << (itemCount ? "" : " : NoMoreHashes") << " from " << _peerID;

            // hard limit on announced hashes; exceeding it disconnects the peer
            if (itemCount > c_maxIncomingNewHashes)
            {
                disablePeer(_peerID, "Too many new hashes");
                break;
            }

            // decode (hash, block number) pairs before handing them off
            vector<pair<h256, u256>> hashes(itemCount);
            for (unsigned i = 0; i < itemCount; ++i)
                hashes[i] = std::make_pair(_r[i][0].toHash<h256>(), _r[i][1].toInt<u256>());

            m_peerObserver->onPeerNewHashes(_peerID, hashes);
            break;
        }
        case GetNodeDataPacket:
        {
            unsigned count = static_cast<unsigned>(_r.itemCount());
            if (!count)
            {
                LOG(m_loggerImpolite) << "Zero-entry GetNodeData: Not replying to " << _peerID;
                m_host->updateRating(_peerID, -10);
                break;
            }
            LOG(m_logger) << "GetNodeData (" << dec << count << " entries) from " << _peerID;

            strings const data = m_hostData->nodeData(_r);

            m_host->updateRating(_peerID, 0);
            RLPStream s;
            m_host->prep(_peerID, name(), s, NodeDataPacket, data.size());
            for (auto const& element : data)
                s.append(element);
            m_host->sealAndSend(_peerID, s);
            break;
        }
        case GetReceiptsPacket:
        {
            unsigned count = static_cast<unsigned>(_r.itemCount());
            if (!count)
            {
                LOG(m_loggerImpolite) << "Zero-entry GetReceipts: Not replying to " << _peerID;
                m_host->updateRating(_peerID, -10);
                break;
            }
            LOG(m_logger) << "GetReceipts (" << dec << count << " entries) from " << _peerID;

            pair<bytes, unsigned> const rlpAndItemCount = m_hostData->receipts(_r);

            m_host->updateRating(_peerID, 0);
            RLPStream s;
            m_host->prep(_peerID, name(), s, ReceiptsPacket, rlpAndItemCount.second)
                .appendRaw(rlpAndItemCount.first, rlpAndItemCount.second);
            m_host->sealAndSend(_peerID, s);
            break;
        }
        case NodeDataPacket:
        {
            if (peer.asking() != Asking::NodeData)
                LOG(m_loggerImpolite)
                    << "Peer " << _peerID << " giving us node data when we didn't ask for them.";
            else
            {
                setIdle(_peerID);
                m_peerObserver->onPeerNodeData(_peerID, _r);
            }
            break;
        }
        case ReceiptsPacket:
        {
            if (peer.asking() != Asking::Receipts)
                LOG(m_loggerImpolite)
                    << "Peer " << _peerID << " giving us receipts when we didn't ask for them.";
            else
            {
                setIdle(_peerID);
                m_peerObserver->onPeerReceipts(_peerID, _r);
            }
            break;
        }
        default:
            return false;
        }
    }
    catch (Exception const&)
    {
        // malformed payloads etc.: log and keep the session alive
        LOG(m_loggerWarn) << "Peer " << _peerID << " causing an exception: "
                          << boost::current_exception_diagnostic_information() << " " << _r;
    }
    catch (std::exception const& _e)
    {
        LOG(m_loggerWarn) << "Peer " << _peerID << " causing an exception: "
                          << _e.what() << " " << _r;
    }
    return true;
}
// Handle a REST request addressing either a variable or an event of a node
// (args[0] = node name, args[1] = variable or event name).
// If args[1] is not a known event, it is treated as a variable: POST or >=3
// path components set it, otherwise its value is read (the HTTP reply is
// deferred via pendingVariables until the node answers).  Otherwise the
// request emits the event with the remaining arguments as payload.
void HttpInterface::evVariableOrEvent(HttpRequest* req, strings& args)
{
    string nodeName(args[0]);
    size_t eventPos;
    if ( ! commonDefinitions.events.contains(UTF8ToWString(args[1]), &eventPos))
    {
        // this is a variable
        if (req->method.find("POST") == 0 || args.size() >= 3)
        {
            // set variable value
            strings values;
            if (args.size() >= 3)
                values.assign(args.begin()+1, args.end());
            else
            {
                // Parse POST form data
                values.push_back(args[1]);
                parse_json_form(req->content, values);
            }
            if (values.size() == 0)
            {
                finishResponse(req, 404, "");
                if (verbose)
                    cerr << req << " evVariableOrEevent 404 can't set variable " << args[0] << ", no values" << endl;
                return;
            }
            sendSetVariable(nodeName, values);
            finishResponse(req, 200, "");
            if (verbose)
                cerr << req << " evVariableOrEevent 200 set variable " << values[0] << endl;
        }
        else
        {
            // get variable value: values holds just the variable name
            strings values;
            values.assign(args.begin()+1, args.begin()+2);
            unsigned source, start;
            if ( ! getNodeAndVarPos(nodeName, values[0], source, start))
            {
                finishResponse(req, 404, "");
                if (verbose)
                    cerr << req << " evVariableOrEevent 404 no such variable " << values[0] << endl;
                return;
            }
            sendGetVariables(nodeName, values);
            // no response yet: it is sent when the node's answer arrives
            pendingVariables[std::make_pair(source,start)].insert(req);
            if (verbose)
                cerr << req << " evVariableOrEevent schedule var " << values[0]
                     << "(" << source << "," << start << ") add " << req << " to subscribers" << endl;
            return;
        }
    }
    else
    {
        // this is an event
        // arguments are args 1..N
        strings data;
        data.push_back(args[1]);
        if (args.size() >= 3)
            for (size_t i=2; i<args.size(); ++i)
                data.push_back((args[i].c_str()));
        else if (req->method.find("POST") == 0)
        {
            // Parse POST form data
            // NOTE(review): if req->content is a std::string, this is the
            // string(str, pos) constructor and yields an EMPTY string (copy
            // from position size()); confirm whether the (ptr, len) overload
            // was intended here -- the variable branch above passes
            // req->content directly.
            parse_json_form(std::string(req->content, req->content.size()), data);
        }
        sendEvent(nodeName, data);
        finishResponse(req, 200, ""); // or perhaps {"return_value":null,"cmd":"sendEvent","name":nodeName}?
        return;
    }
}
void HttpInterface::evNodes(HttpRequest* req, strings& args) { bool do_one_node(args.size() > 0); std::stringstream json; json << (do_one_node ? "" : "["); for (NodesMap::iterator descIt = nodes.begin(); descIt != nodes.end(); ++descIt) { const Node& description(descIt->second); string nodeName = WStringToUTF8(description.name); json << "{"; json << "\"name\":\"" << nodeName << "\",\"protocolVersion\":" << description.protocolVersion; if (do_one_node) { json << ",\"bytecodeSize\":" << description.bytecodeSize; json << ",\"variablesSize\":" <<description.variablesSize; json << ",\"stackSize\":" << description.stackSize; // named variables json << ",\"namedVariables\":{"; bool seen_named_variables = false; for (NodeNameVariablesMap::const_iterator n(allVariables.find(nodeName)); n != allVariables.end(); ++n) { VariablesMap vm = n->second; for (VariablesMap::iterator i = vm.begin(); i != vm.end(); ++i) { json << (i == vm.begin() ? "" : ",") << "\"" << WStringToUTF8(i->first) << "\":" << i->second.second; seen_named_variables = true; } } if ( ! seen_named_variables ) { // failsafe: if compiler hasn't found any variables, get them from the node description for (vector<Aseba::TargetDescription::NamedVariable>::const_iterator i(description.namedVariables.begin()); i != description.namedVariables.end(); ++i) json << (i == description.namedVariables.begin() ? "" : ",") << "\"" << WStringToUTF8(i->name) << "\":" << i->size; } json << "}"; // local events variables json << ",\"localEvents\":{"; for (size_t i = 0; i < description.localEvents.size(); ++i) { string ev(WStringToUTF8(description.localEvents[i].name)); json << (i == 0 ? "" : ",") << "\"" << ev << "\":" << "\"" << WStringToUTF8(description.localEvents[i].description) << "\""; } json << "}"; // constants from introspection json << ",\"constants\":{"; for (size_t i = 0; i < commonDefinitions.constants.size(); ++i) json << (i == 0 ? 
"" : ",") << "\"" << WStringToUTF8(commonDefinitions.constants[i].name) << "\":" << commonDefinitions.constants[i].value; json << "}"; // events from introspection json << ",\"events\":{"; for (size_t i = 0; i < commonDefinitions.events.size(); ++i) json << (i == 0 ? "" : ",") << "\"" << WStringToUTF8(commonDefinitions.events[i].name) << "\":" << commonDefinitions.events[i].value; json << "}"; } json << "}"; } json <<(do_one_node ? "" : "]"); finishResponse(req,200,json.str()); }
// Load an .aesl script file: parse the XML network description, register the
// global events and constants it declares, and compile and send each node's
// code to the matching node on the network.
// Usage: load FILENAME
// On any error the common definitions are cleared again so the shell is not
// left with a half-loaded script.
void Shell::load(const strings& args)
{
    // check that there is the correct number of arguments
    if (args.size() != 2)
    {
        wcerr << "wrong number of arguments, usage: load FILENAME" << endl;
        return;
    }

    // open document
    const string& fileName(args[1]);
    xmlDoc *doc(xmlReadFile(fileName.c_str(), nullptr, 0));
    if (!doc)
    {
        wcerr << "cannot read XML from file " << UTF8ToWString(fileName) << endl;
        return;
    }
    xmlNode *domRoot(xmlDocGetRootElement(doc));

    // clear existing data
    commonDefinitions.events.clear();
    commonDefinitions.constants.clear();
    allVariables.clear();

    // load new data
    int noNodeCount(0);   // scripts whose node does not exist on the network
    bool wasError(false);
    if (!xmlStrEqual(domRoot->name, BAD_CAST("network")))
    {
        wcerr << "root node is not \"network\", XML considered invalid" << endl;
        wasError = true;
    }
    else for (xmlNode *domNode(xmlFirstElementChild(domRoot)); domNode; domNode = domNode->next)
    {
        if (domNode->type == XML_ELEMENT_NODE)
        {
            // an Aseba node, which contains a virtual machine
            if (xmlStrEqual(domNode->name, BAD_CAST("node")))
            {
                // get attributes, child and content
                xmlChar *name(xmlGetProp(domNode, BAD_CAST("name")));
                if (!name)
                {
                    wcerr << "missing \"name\" attribute in \"node\" entry" << endl;
                }
                else
                {
                    const string _name((const char *)name);
                    xmlChar * text(xmlNodeGetContent(domNode));
                    if (!text)
                    {
                        wcerr << "missing text in \"node\" entry" << endl;
                    }
                    else
                    {
                        // got the identifier of the node and compile the code
                        unsigned preferedId(0);
                        xmlChar *storedId = xmlGetProp(domNode, BAD_CAST("nodeId"));
                        if (storedId)
                            preferedId = unsigned(atoi((char*)storedId));
                        bool ok;
                        unsigned nodeId(getNodeId(UTF8ToWString(_name), preferedId, &ok));
                        if (ok)
                        {
                            if (!compileAndSendCode(UTF8ToWString((const char *)text), nodeId, _name))
                                wasError = true;
                        }
                        else
                            noNodeCount++;

                        // free attribute and content
                        xmlFree(text);
                    }
                    xmlFree(name);
                }
            }
            // a global event
            else if (xmlStrEqual(domNode->name, BAD_CAST("event")))
            {
                // get attributes
                xmlChar *name(xmlGetProp(domNode, BAD_CAST("name")));
                if (!name)
                    wcerr << "missing \"name\" attribute in \"event\" entry" << endl;
                xmlChar *size(xmlGetProp(domNode, BAD_CAST("size")));
                if (!size)
                    wcerr << "missing \"size\" attribute in \"event\" entry" << endl;
                // add event
                if (name && size)
                {
                    int eventSize(atoi((const char *)size));
                    if (eventSize > ASEBA_MAX_EVENT_ARG_SIZE)
                    {
                        // NOTE(review): message lacks spaces around the numbers
                        wcerr << "Event " << name << " has a length " << eventSize << "larger than maximum" << ASEBA_MAX_EVENT_ARG_SIZE << endl;
                        wasError = true;
                        break;
                    }
                    else
                    {
                        commonDefinitions.events.push_back(NamedValue(UTF8ToWString((const char *)name), eventSize));
                    }
                }
                // free attributes
                if (name)
                    xmlFree(name);
                if (size)
                    xmlFree(size);
            }
            // a global constant
            else if (xmlStrEqual(domNode->name, BAD_CAST("constant")))
            {
                // get attributes
                xmlChar *name(xmlGetProp(domNode, BAD_CAST("name")));
                if (!name)
                    wcerr << "missing \"name\" attribute in \"constant\" entry" << endl;
                xmlChar *value(xmlGetProp(domNode, BAD_CAST("value")));
                if (!value)
                    wcerr << "missing \"value\" attribute in \"constant\" entry" << endl;
                // add constant if attributes are valid
                if (name && value)
                {
                    commonDefinitions.constants.push_back(NamedValue(UTF8ToWString((const char *)name), atoi((const char *)value)));
                }
                // free attributes
                if (name)
                    xmlFree(name);
                if (value)
                    xmlFree(value);
            }
            else
                wcerr << "Unknown XML node seen in .aesl file: " << domNode->name << endl;
        }
    }

    // release memory
    xmlFreeDoc(doc);

    // check if there was an error
    if (wasError)
    {
        wcerr << "There was an error while loading script " << UTF8ToWString(fileName) << endl;
        commonDefinitions.events.clear();
        commonDefinitions.constants.clear();
        allVariables.clear();
    }

    // check if there was some matching problem
    if (noNodeCount)
    {
        wcerr << noNodeCount << " scripts have no corresponding nodes in the current network and have not been loaded." << endl;
    }
}
// Convenience overload: forward to the path-based load_json, defaulting to
// an empty path when no file name was given on the command line (args[2]).
pobj cmd_t::load_json ( const strings& args )
{
	if ( args.size() > 2 )
		return load_json ( args[2] );
	return load_json ( L"" );
}
size_t aggregation(vector<string> &keyIns,vector<int> &keyPrefixIns, vector<int> &keyActionIns, vector<size_t> &maskes, int actionSize, float &storage, bool isInit, int &fingerprintOld,vector<int> &uniqueAggKeyprefixes, strings& overKeys, size_ts& overKeyNos, floats& haoOvers, float target) { // --------------------------- vector<size_t> indexes; clusterAction(keyIns, keyActionIns, maskes,indexes); // ----------------------------- // find min value and max value for each subtrie vector<size_t> IPUpperBound; vector<size_t> IPLowerBound; IPLowerBound.push_back(parseIPV4string("0.0.0.0")); IPUpperBound.push_back(parseIPV4string("0.255.255.255")); for(uint16_t i = 0; i < indexes.size()-1; i ++) { size_t lowerBound = parseIPV4string(keyIns[indexes[i]].c_str()) & maskes[0]; size_t upperBound = lowerBound + (1<<24)-1; IPLowerBound.push_back(lowerBound); IPUpperBound.push_back(upperBound); } // ---------------------------------------------------------- // Define Tries uint16_t trieNum = indexes.size(); // trie number Trie *bTrie; // define trees bTrie = new Trie[trieNum]; // define trees // --------------------------------------------------------- /* Insert keys to trie */ insertWordTrieSimple(bTrie, trieNum, indexes, keyIns, keyPrefixIns, keyActionIns); // ------------------------------------------------------------- // classify tries according to actions ActionOrder actionOrder[actionSize]; cout<<"* ActionOder clustering ..."<<endl; for(int ai = 0; ai < actionSize; ai++) { actionOrder[ai].aTrieOder.clear(); } for(int ai = 0; ai < actionSize; ai++) { for (int ti = 0; ti < indexes.size(); ti++) { if (bTrie[ti].maction == ai) { actionOrder[ai].aTrieOder.push_back(ti); } } } // ------------------------------------------------------------------------- /* Aggregation */ /* Init variables */ size_t countKey = 0; size_t countAggregateKey = 0; size_t countBlackKey =0; size_t countOriKey =0; vector<string> keys; vector<int> keyActions; vector<string> blackKeys; vector<int> 
blackkeyPrefixes; vector<string> aggregateKeys; int aggrPrefixlength = AGGR_PREFIX; for(int ai = 0; ai < actionSize; ai++) { g_vcountkey[ai] = 0; g_vcountblackkey[ai] = 0; // ---------------------------------------------------------- /* aggregate Trie */ aggregateTrie(bTrie, ai, actionOrder, countKey,countAggregateKey, countBlackKey,countOriKey, keys, keyActions,blackKeys,blackkeyPrefixes, aggregateKeys, aggrPrefixlength, 1, isInit); cout<<"Action: "<<ai<<" threshold: "<<g_vweightThld[ai]<<" "; } cout<<endl; cout<<"* orikey "<<countOriKey<<" agg "<< countAggregateKey<<" blkey "<<countBlackKey<<" countKey "<<countKey<<endl; // ------------------------------------------------ // Find the actions to compress and decompress ints compressActions; ints decompressActions; compressAct(haoOvers, target, actionSize, compressActions, decompressActions); // -------------------------------------------- // Decompress aggregate keys strings aggrIPs; ints aggrPrefixes; size_t aggrCount = 0; cout<<"overbig Keys size: "<<overKeys.size()<<endl; for(size_t i = 0; i< overKeys.size(); i++) { size_t ipInt = parseIPV4string(overKeys[i].c_str()); // lookup key in aggregate table, if inside, get the aggregate key bool isAggregatekey = 0; //if(cuckooAggrKeyTable.mm > 1) { for(int mi = 0; mi < uniqueAggKeyprefixes.size(); mi++) { size_t subIP = ipInt & maskes[uniqueAggKeyprefixes[mi]-8]; string flowstr = parsedec2IPV4(subIP); int prefix = uniqueAggKeyprefixes[mi]; isAggregatekey = cuckooAggrKeyTable.LookUpKey(flowstr,prefix); if (isAggregatekey) { aggrIPs.push_back(flowstr); aggrPrefixes.push_back(prefix); // Get the aggregate keys and decompress them for(uint16_t ti = 0; ti < trieNum; ti++) { if(ipInt >= IPLowerBound[ti] && ipInt < IPUpperBound[ti]) { // If the action is to compress, no need to decompress the aggregate keys for(int ai = 0; ai <compressActions.size(); ai++) { if(ti/64 == compressActions[ai]) { break; } } bTrie[ti].searchAggrPrefix(DecToBin(subIP),prefix, aggrCount); break; } 
} // Find one match and break break; } } } } // ------------------------------------------------ // load aggregate previous keys and decompress them ifstream aggrFile(AGGRFILENAME.c_str()); string aggrIPStr; int aggrPrefix; while(aggrFile>>aggrIPStr>>aggrPrefix && aggrIPs.size()<3500) { aggrIPs.push_back(aggrIPStr); aggrPrefixes.push_back(aggrPrefix); size_t aggrIPInt = parseIPV4string(aggrIPStr.c_str()); // get the aggregate key for(uint16_t ti = 0; ti < trieNum; ti++) { if(aggrIPInt >= IPLowerBound[ti] && aggrIPInt < IPUpperBound[ti]) { /*for(int ai = 0; ai <compressActions.size(); ai++) { if(ti/64 == compressActions[ai]) { break; } }*/ bTrie[ti].searchAggrPrefix(DecToBin(aggrIPInt),aggrPrefix, aggrCount); break; } } } aggrFile.clear(); aggrFile.close(); vector<size_t>().swap(IPLowerBound); vector<size_t>().swap(IPUpperBound); //--------------------------------------- // write to aggr file; ofstream aggrFileOut("aggrFile"); for(size_t i = 0; i < aggrIPs.size(); i++) { aggrFileOut<<aggrIPs[i]<<" "<<aggrPrefixes[i]<<endl; } aggrFileOut.clear(); aggrFileOut.close(); vector<string>().swap(aggrIPs); vector<int>().swap(aggrPrefixes); // ------------------------------------------- // If decompress alters fingerprint length, compress other keys countKey += aggrCount; // key number after decompress cout<<"Increase aggrcount: "<<aggrCount<<endl; uint16_t cuckooBlackSize = CUCKOO_BLACK_SIZE; float loadFactor = 0.90; int fingerprint = (storage*1024.0f*8.0f-(cuckooBlackSize)*39/0.9-FLOW_EST_SIZE*17/0.9)*loadFactor/(countKey) -3; int iteartion = 0; int maxIteration = 12; ints prefixlength; prefixlength.assign(actionSize,20); while(fingerprint!=fingerprintOld && iteartion<maxIteration) { iteartion++; // ----------------------------------------- // aggregate all other keys /* Aggregation */ /* Init variables */ countKey = 0; countAggregateKey = 0; countBlackKey =0; countOriKey =0; keys.clear(); keyActions.clear(); blackKeys.clear(); blackkeyPrefixes.clear(); 
aggregateKeys.clear(); for(int ai = 0; ai < actionSize; ai++) { g_vcountkey[ai] = 0; g_vcountblackkey[ai] = 0; if(fingerprint>fingerprintOld) // decompress, increase threshold { g_vweightThld[ai] += 0.2*g_vweightThld[ai]*(fingerprint - fingerprintOld); prefixlength[ai] ++; } else if( g_vweightThld[ai] > 0) { g_vweightThld[ai] += 0.1*g_vweightThld[ai]*(fingerprint - fingerprintOld) ; prefixlength[ai] --; } //g_vweightThld[ai] = 0; // ---------------------------------------------------------- /* aggregate Trie */ aggregateTrie(bTrie, ai, actionOrder, countKey,countAggregateKey, countBlackKey,countOriKey, keys,keyActions,blackKeys,blackkeyPrefixes, aggregateKeys, aggrPrefixlength, 1, isInit); cout<<"* threshold: "<<g_vweightThld[ai]<<" prefixlength: "<<prefixlength[ai]<<endl; } cout<<" coutkey "<<"agg "<<"blkey "<<"orikey "<<countOriKey<<" "<< countAggregateKey<<" "<<countBlackKey<<" "<<countKey<<endl; fingerprint = (storage*1024.0f*8.0f*loadFactor-(cuckooBlackSize)*39/0.9-FLOW_EST_SIZE*17/0.9)/(countKey) -3; cout<<"Fingerprint: "<<fingerprint<<endl; } // ----------------------------------------- // Get aggregate result countKey = 0; countAggregateKey = 0; countBlackKey =0; countOriKey =0; keys.clear(); keyActions.clear(); blackKeys.clear(); blackkeyPrefixes.clear(); aggregateKeys.clear(); vector<char> word; size_t recoverCount = 0; for(uint16_t ti = 0; ti < trieNum; ti++) { bTrie[ti].nodeCount(bTrie[ti].root,countKey,countAggregateKey,countBlackKey,countOriKey); bTrie[ti].printNode(bTrie[ti].root,word,keys,keyActions,blackKeys,blackkeyPrefixes,aggregateKeys); bTrie[ti].recoverTrie(bTrie[ti].root8, recoverCount); } cout<<" coutkey "<<"agg "<<"blkey "<<"orikey "<<countOriKey<<" "<< countAggregateKey<<" "<<countBlackKey<<" "<<countKey<<endl; // ---------------------------------------------- // Compute Unique prefixes vector<int> keyPrefixes; for(size_t i = 0; i < keys.size(); i++) { size_t found = keys[i].find('/'); string prefixstr = keys[i].substr(found+1, 
keys[i].size()-found); keyPrefixes.push_back(str2num(prefixstr)); } uniqueAggKeyprefixes.clear(); prefixNum(keyPrefixes,uniqueAggKeyprefixes); // ---------------------------------- /* Insert to cuckoo filter */ // parameters for cuckoo filter int slotNo = 4; size_t keySize = keys.size(); size_t bucketNo = size_t(keySize/(loadFactor*slotNo))+1; fingerprint = (storage*1024.0f*8.0f-(cuckooBlackSize)*39/0.9-FLOW_EST_SIZE*17/0.9)*loadFactor/(countKey) -3; //fingerprintOld = fingerprint; long maxNumKicks = 1000; cout<<"* Fingerprint length: "<<fingerprint<<endl; //init cuckoo filter cuckooFilter.ClearTable(); cuckooFilter.cuckooFilterInit(bucketNo,fingerprint,slotNo,maxNumKicks); // add key to cuckoo filter addCuckooFilter(keys, keyActions); // -------------------------------------- // Add aggregate keys to cuckooTable size_t aggregateKeySize = aggregateKeys.size(); bucketNo = long(aggregateKeySize/(loadFactor*slotNo)); cuckooAggrKeyTable.CuckooTableInit(bucketNo,fingerprint,slotNo,maxNumKicks); for(size_t i = 0; i < aggregateKeySize; i++) { size_t found = aggregateKeys[i].find('/'); string str = aggregateKeys[i].substr(0,found); string prefixstr = aggregateKeys[i].substr(found+1, aggregateKeys[i].size()-found); cuckooAggrKeyTable.AddKey(str,str2num(prefixstr)); } vector<string>().swap(keys); vector<int>().swap(keyActions); vector<string>().swap(blackKeys); vector<int>().swap(blackkeyPrefixes); vector<string>().swap(aggregateKeys); // -------------------------------------- // Add blackkey to cuckooTable /*cout<<"* Add blackkey to cuckooTable!"<<endl; size_t blackKeySize = blackKeys.size(); size_t bucketSize = int((blackKeySize)/(loadFactor*slotNo))+1; int MaxKickoutNum = 1000; cuckooBlackKeyTable.ClearTable(); cuckooBlackKeyTable.CuckooTableInit(bucketSize,fingerprint,slotNo, MaxKickoutNum); for(size_t i = 0; i < blackKeySize; i++) { cuckooBlackKeyTable.AddKeyPrefix(blackKeys[i],blackkeyPrefixes[i], 4); } blackKeys.clear(); blackkeyPrefixes.clear();*/ // 
--------------------------------------- for(int i = 0; i < trieNum; i++) { bTrie[i].deleteChild(bTrie[i].root); } if(!bTrie) delete[] bTrie; cout<<"* Aggregation Return! "<<endl; return countKey; }
// Dump the target description of a node to wcerr: metadata sizes, variables
// (from the compiler's result when a compilation for this node exists,
// otherwise from the static target description), local events and native
// function signatures.  args[1] is the node name.
void Shell::listVariables(const strings& args)
{
    // get target description
    if (args.size() < 2)
    {
        wcerr << "missing argument" << endl;
        return;
    }
    bool ok;
    const unsigned nodeId(getNodeId(UTF8ToWString(args[1]), 0, &ok));
    if (!ok)
    {
        wcerr << "invalid node name " << UTF8ToWString(args[1]) << endl;
        return;
    }
    const TargetDescription* desc(getDescription(nodeId));
    if (!desc)
        return; // no description available: nothing to show
    // dump target description
    wcerr << "Target name " << desc->name << endl;
    wcerr << "protocol version " << desc->protocolVersion << endl;
    wcerr << "bytecode size " << desc->bytecodeSize << endl;
    wcerr << "variables size " << desc->variablesSize << endl;
    wcerr << "stack size " << desc->stackSize << endl;
    wcerr << "variables:" << endl;
    // if we have a result from the compiler for this node...
    const NodeNameToVariablesMap::const_iterator allVarMapIt(allVariables.find(args[1]));
    if (allVarMapIt != allVariables.end())
    {
        // ... use it
        const VariablesMap& varMap(allVarMapIt->second);
        VariablesMap::const_iterator it(varMap.begin());
        for (;it != varMap.end(); ++it)
            wcerr << " " << it->first << " " << it->second.second << endl;
    }
    else
    {
        // ... otherwise shows variables from the target description
        for (size_t i=0; i<desc->namedVariables.size(); ++i)
        {
            const TargetDescription::NamedVariable& var(desc->namedVariables[i]);
            wcerr << " " << var.name << " " << var.size << endl;
        }
    }
    wcerr << "local events: " << endl;
    for (size_t i=0; i<desc->localEvents.size(); ++i)
    {
        const TargetDescription::LocalEvent &event(desc->localEvents[i]);
        wcerr << " " << event.name << " - " << event.description << endl;
    }
    wcerr << "native functions:" << endl;
    for (size_t i=0; i<desc->nativeFunctions.size(); ++i)
    {
        const TargetDescription::NativeFunction& func(desc->nativeFunctions[i]);
        wcerr << " " << func.name << "(";
        for (size_t j=0; j<func.parameters.size(); ++j)
        {
            const TargetDescription::NativeFunctionParameter& param(func.parameters[j]);
            wcerr << param.name << "[";
            // a non-positive size encodes a templated length parameter T<n>
            if (param.size > 0)
                wcerr << param.size;
            else
                wcerr << "T<" << -param.size << ">";
            wcerr << "]";
            if (j+1 != func.parameters.size())
                wcerr << ", ";
        }
        wcerr << ") - ";
        wcerr << func.description << endl;
    }
}