// Build the scan-parameter node list from the XML schema and data files.
// Each node is heap-allocated and handed to the caller through nlist; the
// caller owns (and must eventually release) the nodes.
void populateList(NodeList &nlist)
{
    XmlRegexIO xmlRIO;
    const string filename = "ScanParams.xml";
    const string dataFilename = "ScanParamsData.xml";

    // Load one element: read its tag definition from the schema file, then
    // its values from the data file, then append it to the output list.
    // Extracted to remove the triplicated load sequence of the original.
    auto loadNode = [&](XmlNode *node, const string &tagPattern,
                        const string &dataPattern) {
        xmlRIO.setPattern(tagPattern);
        xmlRIO.getXmlTags(filename, *node);
        xmlRIO.setPattern(dataPattern);
        xmlRIO.getXmlData(dataFilename, *node);
        nlist.push_back(node);
    };

    loadNode(new PaperSize(),
             "<element name=\"PaperSize\"\\s*>",
             "<[/]*PaperSize>");
    loadNode(new Area(),
             "<element name=\"Area\"\\s*>",
             "<[/]*Area>");
    loadNode(new PatchCodeNotification(),
             "<element name=\"PatchCodeNotification\".*>",
             "<[/]*PatchCodeNotification>");
}
NodeList InstBr::getTerminatorEdges() const {
  // The false successor is always present; the true successor exists only
  // when this is a conditional branch.
  NodeList Edges;
  Edges.push_back(TargetFalse);
  if (TargetTrue)
    Edges.push_back(TargetTrue);
  return Edges;
}
NodeList InstSwitch::getTerminatorEdges() const {
  // Successors are the default label followed by each case label in order.
  NodeList Edges;
  Edges.push_back(LabelDefault);
  for (SizeT Idx = 0; Idx != NumCases; ++Idx)
    Edges.push_back(Labels[Idx]);
  return Edges;
}
// Evaluate an XPath expression and return matching Nodes. NodeList Document::findXPath(const std::string& path) const { // Set up the XPath context xmlXPathContextPtr context = xmlXPathNewContext(_xmlDoc); if (context == NULL) { std::cerr << "ERROR: xml::findPath() failed to create XPath context " << "when searching for " << path << std::endl; throw XPathException("Failed to create XPath context"); } // Evaluate the expression const xmlChar* xpath = reinterpret_cast<const xmlChar*>(path.c_str()); xmlXPathObjectPtr result = xmlXPathEvalExpression(xpath, context); xmlXPathFreeContext(context); if (result == NULL) { std::cerr << "ERROR: xml::findPath() failed to evaluate expression " << path << std::endl; throw XPathException("Failed to evaluate XPath expression"); } // Construct the return vector. This may be empty if the provided XPath // expression does not identify any nodes. NodeList retval; xmlNodeSetPtr nodeset = result->nodesetval; if (nodeset != NULL) { for (int i = 0; i < nodeset->nodeNr; i++) { retval.push_back(Node(nodeset->nodeTab[i])); } } xmlXPathFreeObject(result); return retval; }
void SplitTree::rdsPrintPass( SplitPassInfo* inPass, std::ostream& inStream )
{
  // Emit each pass exactly once, after every pass it depends on.
  if( inPass == NULL )
    return;
  if( inPass->printVisited )
    return;
  inPass->printVisited = true;

  // Recurse into dependencies first so emission order respects the DAG.
  for( SplitNodeSet::iterator d = inPass->descendents.begin();
       d != inPass->descendents.end(); ++d )
  {
    rdsPrintPass( _dagOrderNodeList[*d]->_assignedPass, inStream );
  }

  // Collect the outputs this pass must produce, then compile them to the
  // given stream; the heuristics result is not needed here.
  NodeList passOutputs;
  for( SplitNodeSet::iterator o = inPass->usefulOutputs.begin();
       o != inPass->usefulOutputs.end(); ++o )
  {
    passOutputs.push_back( _dagOrderNodeList[ *o ] );
  }

  SplitShaderHeuristics ignoredHeuristics;
  _compiler.compile( *this, passOutputs, inStream, ignoredHeuristics, true );
}
// Level-synchronous distributed BFS from 'root'. Each rank owns a
// contiguous slice of the vertex range (starting at first_vertex) and
// stores distances only for its local slice.
void ParallelBFS::calculate(NodeId root) {
    distance.assign((size_t) vertex_count, infinity);
    NodeId level = 1;
    NodeList frontier;
    frontier.reserve((size_t)vertex_count);
    // Seed the frontier on the rank that owns the root; global vertex ids
    // are translated to rank-local indices via first_vertex.
    if (comm.rank() == find_owner(root)) {
        frontier.push_back(root - first_vertex);
        distance[root - first_vertex] = 0;
    }
    std::vector<NodeList> send_buf((size_t)comm.size());
    NodeList new_frontier;
    NodeList sizes((size_t)comm.size()), displacements((size_t)comm.size());
    // Iterate while any rank still has frontier vertices to expand.
    while (mpi::all_reduce(comm, (NodeId)frontier.size(), std::plus<NodeId>()) > 0) {
        // Expand the local frontier (CSR adjacency: vertices[] offsets into
        // edges[]); route each discovered neighbour to its owning rank.
        for (NodeId u : frontier)
            for (int e = vertices[u]; e < vertices[u + 1]; ++e) {
                int v = edges[e];
                send_buf[find_owner(v)].push_back(v);
            }
        // All-to-all exchange implemented as comm.size() rounds of
        // gather/gatherv, each round rooted at rank i.
        for (int i = 0; i < comm.size(); ++i) {
            mpi::gather(comm, (NodeId)send_buf[i].size(), sizes.data(), i);
            if (i == comm.rank()) {
                // displacements[0] stays 0 from construction; prefix-sum the
                // per-rank counts to build the gatherv layout.
                for (int j = 1; j < comm.size(); ++j)
                    displacements[j] = displacements[j - 1] + sizes[j - 1];
                new_frontier.resize(
                    (size_t)(displacements[comm.size()-1] + sizes[comm.size() - 1]));
                mpi::gatherv(comm, send_buf[i], new_frontier, sizes, displacements, i);
            } else {
                mpi::gatherv(comm, send_buf[i], i);
            }
        }
        for (size_t i = 0; i < comm.size(); ++i) send_buf[i].clear();
        // Keep only vertices not yet visited; they become the next frontier.
        frontier.clear();
        for (int v : new_frontier) {
            v -= first_vertex;
            if (distance[v] == infinity) {
                distance[v] = level;
                frontier.push_back(v);
            }
        }
        ++level;
    }
}
// Breadth-first walk over this node and all of its descendants, collecting
// up to 'count' nodes (0 = unlimited), optionally keeping only nodes whose
// class name appears in classTypefilter.
void sgNode::getInheritsNodes( NodeList &outlist, size_t count /*= 0*/, const StringHandleSet &classTypefilter /*= StringHandleSet()*/ )
{
    outlist.clear();
    if(count > 0)
        outlist.reserve(count);
    else
        count = size_t(-1) - 1; // effectively unlimited

    const bool doFilter = !classTypefilter.empty();

    size_t collected = 0;
    if(collected == count)
        return ;

    // BFS queue, seeded with this node itself.
    sg_list(sgNode*) pending;
    pending.push_back(this);

    while(collected < count && !pending.empty())
    {
        sgNode *current = pending.front();
        pending.pop_front();

        // Accept the node either unconditionally or when its class name
        // passes the filter.
        if(!doFilter ||
           classTypefilter.find(current->GetMyClassName()) != classTypefilter.end())
        {
            outlist.push_back(current);
            ++collected;
        }

        // Enqueue the children for the next rounds of the walk.
        ChildNodeMap &children = current->m_Children;
        for(ChildNodeMap::const_iterator it = children.begin(); it != children.end(); ++it)
        {
            pending.push_back(it->second);
        }
    }
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////// ////// Performs a breadth first search to find shortest available path (with non-saturated path capacities) ////// between a search node and a sink node. The function uses a search graph to perform the search and ////// store parent-child relationship of the nodes in the graph. ///////////////////////////////////////////////////////////////////////////////////////////////////////////// void PerformBFS(const Graph& graph, SearchGraph& sgraph, int source, int sink, Path* augpath) { NodeList nlist; nlist.push_back(source); int u, v; int i; SearchNode snode_u, snode_v; bool found = false; vector<int> nbr_nodes; while (nlist.size() > 0) { u = nlist[0]; GetNeighboringNodes(u, graph, &nbr_nodes); snode_u = sgraph[u]; for (i = 0; i < (int) nbr_nodes.size(); i++) { v = nbr_nodes[i]; snode_v = sgraph[v]; if (snode_v.color == -1) { snode_v.color = 0; snode_v.dist = snode_u.dist + 1; snode_v.parent = u; sgraph[v] = snode_v; nlist.push_back(v); } if (v == sink) { found = true; break; } } nlist.pop_front(); snode_u.color = 1; sgraph[u] = snode_u; if (found == true) break; } if (found == true) FindAugmentingPath(sgraph, source, sink, augpath); }
// Read the Kaggle file into a vector of nodes bool DataScaling::generateFirstLevel(ifstream& kaggleFile) { cout << "Level:0" << "\t"; // Set the maximum number of nodes per block, so a block can be processed in memory int maxNodes = 1024 * 64; // must be a power of two int maxNodesFlag = maxNodes - 1; // bit trick to avoid modulo later // Get the nodeList for level 0 NodeList* nodeList = this->nodes[0]; // Process each line in the kaggle file int block = 0; stringstream ss; string buffer; string token; while (getline(kaggleFile, buffer)) { // C++ stuff so we can tokenize the line ss.clear(); ss.str(""); ss << buffer; // The node id is always the first token on the kaggle line ss >> token; int nodeId = atoi(token.c_str()); // Create a node object Node* node = new Node(nodeId); nodeList->push_back(node); // Parse this node's word list while (getline(ss, token, '|')) // kaggle file delimits words with a '|' character { int hashKeyOfWord = atoi(token.c_str()); node->addWord(hashKeyOfWord); } // We need to process the input in blocks, can't fit all the nodes in memory // Check if we've read in a full block yet // Bit AND'ing with maxNodesFlag is a trick to avoid expensive modulo if ((nodeId > 0) && !(nodeId & maxNodesFlag)) { cout << "Block:" << ++block << "\t"; this->nodeCount += nodeList->size(); } } // Process the last incomplete block { cout << "Block:" << ++block << "\t"; this->nodeCount += nodeList->size(); } cout << "Level:1\tnodeCount:" << this->nodeCount << endl; return(true); }
// Append every open node of the current level's node map to toPopulate,
// scanning column-major (x outer, y inner) as before.
void Pathfinder::PopulateListWithNodes(NodeList& toPopulate)
{
    for(int col = 0; col < m_currentLevel->GetWidth(); ++col)
    {
        for(int row = 0; row < m_currentLevel->GetHeight(); ++row)
        {
            // Skip empty cells and closed nodes.
            if(m_nodeMap[col][row] == NULL || !m_nodeMap[col][row]->IsOpen())
                continue;
            toPopulate.push_back(m_nodeMap[col][row]);
        }
    }
}
// Return this node's children, optionally filtered by element name.
// An empty filter string matches every child.
Node::NodeList Node::Children(const string & filterByName)
{
    NodeList list;
    for ( xmlNodePtr child = _xml->children; child != nullptr; child = child->next )
    {
        // libxml2 stores names as const xmlChar* (unsigned char*); there is
        // no operator== between std::string and that type, so cast to
        // const char* before comparing. Guard against a null name so the
        // std::string comparison never dereferences NULL.
        const char* childName = reinterpret_cast<const char*>(child->name);
        if ( filterByName.empty() || (childName != nullptr && filterByName == childName) )
            list.push_back(Wrapped<Node, _xmlNode>(child));
    }
    return list;
}
// Collect the element-type children whose tag name matches 'name' exactly.
xml::NodeList xml::Node::getChildrenByName(const std::string& name) const {
    NodeList result;
    for (xmlNodePtr cur = node->children; cur; cur = cur->next) {
        if (cur->type != XML_ELEMENT_NODE)
            continue;
        if (strcmp((char*) cur->name, name.c_str()) == 0)
            result.push_back(Node(cur));
    }
    return result;
}
// Gather every element-type child of this node; text/comment/PI children
// are skipped.
xml::NodeList xml::Node::getChildNodes() const {
    NodeList result;
    for (xmlNodePtr cur = node->children; cur; cur = cur->next) {
        if (cur->type == XML_ELEMENT_NODE)
            result.push_back(Node(cur));
    }
    return result;
}
// Evaluate every location step of 'path' against 'node' and fill 'result'
// with shared handles to the raw matches. Any previous contents of
// 'result' are discarded.
void findAll(const Path& path, const dom::NodePtr node, NodeList& result) {
    result.clear();

    const detail::LocationStepList& steps = path.getStepList().steps;
    detail::Context context(node.get());

    // Run the step machinery into a temporary raw-pointer vector first.
    detail::NodePtrVec matches;
    findNodes(context, steps.begin(), steps.end(), matches, false);

    // Convert the raw matches back into owning node handles.
    for (detail::NodePtrVec::const_iterator m = matches.begin(); m != matches.end(); ++m) {
        result.push_back((*m)->self().lock());
    }
}
// Return a NodeList of all children of this node NodeList Node::getChildren() const { NodeList retval; // Iterate throught the list of children, adding each child node // to the return list if it matches the requested name for (xmlNodePtr child = _xmlNode->children; child != NULL; child = child->next) { retval.push_back(child); } return retval; }
void DescriptorHeapAllocator::AllocateHeap() { PtrDescHeap Heap; ThrowIfFailed(m_Device->CreateDescriptorHeap(&m_Desc, IID_PPV_ARGS(Heap.GetAddressOf()))); D3D12_CPU_DESCRIPTOR_HANDLE HeapBase = Heap->GetCPUDescriptorHandleForHeapStart(); m_Heaps.reserve(m_Heaps.size() + 1); NodeList freeList; freeList.push_back({ HeapBase.ptr, HeapBase.ptr + m_Desc.NumDescriptors * m_DescriptorSize }); Entry entry = { Heap, freeList}; m_Heaps.push_back(entry); m_FreeHeaps.push_back(m_Heaps.size() - 1); }
///////////////////////////////////////////////////////////////////////////////////////////////////////////// ////// Given a residual graph with no path from the source and sink node, the sunction outputs an assignment list ////// which assigns each node to either belonging to the source tree or the sink tree. It performs breadth first ////// search on the residual graph to find the children of the source tress. Any remaining nodes are aressigned ////// to the sink node. ///////////////////////////////////////////////////////////////////////////////////////////////////////////// void ComputeAssignments(const Graph& graph, int source, int sink, vector<int>* assignments) { SearchGraph sgraph; assignments->clear(); const int nnodes = graph.size(); InitSearchGraph(&sgraph, nnodes, source); assignments->resize(nnodes, -1); NodeList nlist; nlist.push_back(source); int u, v; int i; SearchNode snode_u, snode_v; bool found = false; vector<int> nbr_nodes; while (nlist.size() > 0) { u = nlist[0]; (*assignments)[u] = 1; GetNeighboringNodes(u, graph, &nbr_nodes); snode_u = sgraph[u]; for (i = 0; i < (int) nbr_nodes.size(); i++) { v = nbr_nodes[i]; snode_v = sgraph[v]; if (snode_v.color == -1) { snode_v.color = 0; snode_v.dist = snode_u.dist + 1; snode_v.parent = u; sgraph[v] = snode_v; nlist.push_back(v); } } nlist.pop_front(); snode_u.color = 1; sgraph[u] = snode_u; } }
// Generate a new level of nodes bool DataScaling::generateNextLevel(int level) { cout << "Level:" << level << "\t"; // Get the number of nodes on the previous level NodeList* previousNodeList = this->nodes[level - 1]; int previousSize = previousNodeList->size(); // Set the number of new nodes to generate int numNewNodes = this->nodesPerLevel; // Create new random nodes NodeList* currentNodeList = this->nodes[level]; for (int count = 0; count < numNewNodes; ++count) { // Create a new node int newId = this->nodeCount + count; // nodeCount is total number of nodes in all previous levels Node* newNode = new Node(newId); currentNodeList->push_back(newNode); // Randomly select the degree for this node int nodeDegree = (rand() % this->degree) + 1; // For each degree (as specified on the command line) for (int degree = 0; degree < nodeDegree; ++degree) { // Select a random node from the previous level int oldId = rand() % previousSize; // Check if there's already an edge to the selected node if (!newNode->findEdge(oldId)) { Node* oldNode = previousNodeList->at(oldId); // Add an edge between the new node and the old node newNode->addEdge(oldId); oldNode->addEdge(newId); this->edgeCount += 2; // Randomly add some of the old node's words into the new node int numWords = this->wordsPerNode / nodeDegree; newNode->addPartialWordList(oldNode, numWords); } } } // Update the total number of nodes so far this->nodeCount += numNewNodes; cout << "nodeCount:" << this->nodeCount << endl; return(true); }
// Load the global node map from a CSV file of "node1,node2,cost,reliability"
// lines. Every edge is stored twice, once per direction.
void InitializeNodes(string inFile)
{
    nMap = new NodeMap();
    ifstream iFile(inFile.c_str());
    if (!iFile.is_open())
        return;

    string line;
    while (getline(iFile, line))
    {
        istringstream iss(line);
        string node1, node2, costString, relString;
        getline(iss, node1, ',');
        getline(iss, node2, ',');
        getline(iss, costString, ',');
        getline(iss, relString, ',');

        // Parse the numeric fields once and reuse for both directions.
        const double cost = atof(costString.c_str());
        const double rel = atof(relString.c_str());

        Node* forward = new Node(node2, cost, rel);
        Node* backward = new Node(node1, cost, rel);

        // Ensure both endpoints have an adjacency list, then append.
        if (nMap->find(node1) == nMap->end())
            nMap->insert(pair<string, NodeList*>(node1, new NodeList()));
        nMap->find(node1)->second->push_back(forward);

        if (nMap->find(node2) == nMap->end())
            nMap->insert(pair<string, NodeList*>(node2, new NodeList()));
        nMap->find(node2)->second->push_back(backward);
    }
}
// Return a NodeList containing only the children whose element name equals
// the requested name.
NodeList Node::getNamedChildren(const std::string& name) const
{
    NodeList matches;

    // The requested name is constant across the walk, so convert it to
    // libxml2's xmlChar form once.
    const xmlChar* wanted = reinterpret_cast<const xmlChar*>(name.c_str());

    for (xmlNodePtr child = _xmlNode->children; child != NULL; child = child->next)
    {
        if (xmlStrcmp(child->name, wanted) == 0)
        {
            matches.push_back(child);
        }
    }
    return matches;
}
/**
 * If a node with the same ID exists, update it.
 * Otherwise add a new node.
 * Assumes that remaining never changes for the same node id.
 **/
inline void add(NodeId id, const Node * parent, float distance, float remaining)
{
    // Linear scan of the open list for an existing entry with this id.
    for(NodeList::iterator it = nodes.begin(); it != nodes.end(); ++it)
    {
        if((*it)->getId() != id)
            continue;

        // Already present: keep whichever path is cheaper.
        if((*it)->getDistance() > distance)
        {
            (*it)->newParent(parent, distance);
        }
        return;
    }

    // Not present yet: append a freshly created node.
    nodes.push_back(new Node(id, parent, distance, remaining));
}
// Collect into UNodeList every "real" (non-phi) use reached by the defs of
// statement SA, expanding phi references into the underlying real uses of
// the same register.
void HexagonOptAddrMode::getAllRealUses(NodeAddr<StmtNode *> SA,
                                        NodeList &UNodeList) {
  for (NodeAddr<DefNode *> DA : SA.Addr->members_if(DFG->IsDef, *DFG)) {
    DEBUG(dbgs() << "\t\t[DefNode]: " << Print<NodeAddr<DefNode *>>(DA, *DFG)
                 << "\n");
    RegisterRef DR = DA.Addr->getRegRef();
    // All uses (including phi refs) reachable from this def.
    auto UseSet = LV->getAllReachedUses(DR, DA);
    for (auto UI : UseSet) {
      NodeAddr<UseNode *> UA = DFG->addr<UseNode *>(UI);
      // TempIA is only needed for the debug dump; (void) silences the
      // unused-variable warning in non-debug builds.
      NodeAddr<StmtNode *> TempIA = UA.Addr->getOwner(*DFG);
      (void)TempIA;
      DEBUG(dbgs() << "\t\t\t[Reached Use]: "
                   << Print<NodeAddr<InstrNode *>>(TempIA, *DFG) << "\n");
      if (UA.Addr->getFlags() & NodeAttrs::PhiRef) {
        // A phi reference is not a real use: ask liveness for the phi's
        // real uses and keep only those of the same register.
        NodeAddr<PhiNode *> PA = UA.Addr->getOwner(*DFG);
        NodeId id = PA.Id;
        const Liveness::RefMap &phiUse = LV->getRealUses(id);
        DEBUG(dbgs() << "\t\t\t\tphi real Uses"
                     << Print<Liveness::RefMap>(phiUse, *DFG) << "\n");
        if (phiUse.size() > 0) {
          for (auto I : phiUse) {
            // Skip entries for other registers.
            if (DR != I.first)
              continue;
            auto phiUseSet = I.second;
            for (auto phiUI : phiUseSet) {
              NodeAddr<UseNode *> phiUA = DFG->addr<UseNode *>(phiUI);
              UNodeList.push_back(phiUA);
            }
          }
        }
      } else
        UNodeList.push_back(UA);
    }
  }
}
// Load every non-option argument on the command line as a scene file.
// Returns NULL when nothing loads, the single node when one file loads,
// or a Group containing all loaded nodes otherwise.
Node* osgDB::readNodeFiles(std::vector<std::string>& commandLine,const ReaderWriter::Options* options)
{
    typedef std::vector<osg::Node*> NodeList;
    NodeList loaded;

    // note currently doesn't delete the loaded file entries from the command line yet...
    for(std::vector<std::string>::iterator itr=commandLine.begin();
        itr!=commandLine.end();
        ++itr)
    {
        // Anything starting with '-' is an option, not a filename.
        if ((*itr)[0]=='-') continue;

        osg::Node *node = osgDB::readNodeFile( *itr , options );
        if( node == (osg::Node *)0L ) continue;

        // Give unnamed nodes their source filename as a name.
        if (node->getName().empty()) node->setName( *itr );
        loaded.push_back(node);
    }

    if (loaded.empty()) return NULL;
    if (loaded.size()==1) return loaded.front();

    // Several files loaded: hang them all under a common group.
    osg::Group* group = new osg::Group;
    for(NodeList::iterator itr=loaded.begin();
        itr!=loaded.end();
        ++itr)
    {
        group->addChild(*itr);
    }
    return group;
}
bool SplitTree::rdsCompile( const SplitNodeSet& inNodes, SplitShaderHeuristics& outHeuristics ) { unsigned long startCompile = getTime(); NodeList nodes; for( SplitNodeSet::iterator i = inNodes.begin(); i != inNodes.end(); ++i ) nodes.push_back( _dagOrderNodeList[ *i ] ); std::ostringstream nullStream; _compiler.compile( *this, nodes, nullStream, outHeuristics ); unsigned long stopCompile = getTime(); timeCompilingCounter += stopCompile - startCompile; return outHeuristics.valid; }
bool PagedLOD::removeExpiredChildren(double expiryTime, unsigned int expiryFrame, NodeList& removedChildren) { if (_children.size()>_numChildrenThatCannotBeExpired) { unsigned cindex = _children.size() - 1; if (!_perRangeDataList[cindex]._filename.empty() && _perRangeDataList[cindex]._timeStamp + _perRangeDataList[cindex]._minExpiryTime < expiryTime && _perRangeDataList[cindex]._frameNumber + _perRangeDataList[cindex]._minExpiryFrames < expiryFrame) { osg::Node* nodeToRemove = _children[cindex].get(); removedChildren.push_back(nodeToRemove); return Group::removeChildren(cindex,1); } } return false; }
// Exhaustively search for the best split of the DAG: every output node is
// forced to be a split point, then all subsets of the remaining candidate
// nodes are tried in increasing subset size until scores stop improving.
void SplitTree::exhaustiveSearch()
{
  // first label the outputs
  for( NodeList::iterator i = _outputList.begin(); i != _outputList.end(); ++i )
  {
    (*i)->_splitHere = true;
  }

  // now collect all the unlabeled, nontrivial nodes:
  NodeList nodesToConsider;
  for( NodeList::iterator j = _dagOrderNodeList.begin(); j != _dagOrderNodeList.end(); ++j )
  {
    // Skip nodes already marked, nodes that cannot be saved, and the
    // artificial pseudo-root.
    if( (*j)->isMarkedAsSplit() ) continue;
    if( !(*j)->canBeSaved() ) continue;
    if( *j == _pseudoRoot ) continue;

    nodesToConsider.push_back( *j );
  }

  size_t nodeCount = nodesToConsider.size();

  int bestScore = INT_MAX;
  for( size_t subsetSize = 0; subsetSize < nodeCount; subsetSize++ )
  {
    std::cout << "considering subsets of size " << subsetSize << " out of " << nodeCount << std::endl;
    int bestScoreForSubsetSize = INT_MAX;
    // Search every subset of this size; the best score found is written
    // into bestScoreForSubsetSize (INT_MAX means no valid split).
    exhaustiveSubsetSearch( subsetSize, nodesToConsider, bestScoreForSubsetSize );
    std::cout << "best split has score: " << bestScoreForSubsetSize << std::endl;

    if( bestScoreForSubsetSize != INT_MAX )
    {
      // Heuristic early exit: once a larger subset size scores strictly
      // worse than the best seen so far, stop searching.
      if( (bestScore != INT_MAX) && (bestScoreForSubsetSize > bestScore) )
      {
        // there probably isn't a better partition, lets use this :)
        break;
      }
      if( bestScoreForSubsetSize < bestScore )
        bestScore = bestScoreForSubsetSize;
    }
  }

  std::cout << "best overall score found before giving up: " << bestScore << std::endl;
}
// Return the element-typed children of the wrapped Xerces node; non-element
// children (text, comments, ...) are skipped. An unwrapped instance yields
// an empty list.
DOMElement::NodeList DOMElement::getChildNodes() const
{
    NodeList children;
    if(!m_wrapped)
        return children;

    xercesc::DOMNodeList * rawList = XELEM(m_wrapped)->getChildNodes();
    for(XMLSize_t idx = 0; idx < rawList->getLength(); ++idx)
    {
        // dynamic_cast filters out non-element nodes.
        xercesc::DOMElement * elem =
            dynamic_cast<xercesc::DOMElement *>(rawList->item(idx));
        if(elem)
            children.push_back(DOMElement(ELEM(elem)));
    }
    return children;
}
// Read every top-level node from an .osg text stream. A single node is
// returned directly; multiple nodes are wrapped in an "import group".
virtual ReadResult readNode(std::istream& fin, const Options* options) const
{
    loadWrappers();

    fin.imbue(std::locale::classic());

    Input fr;
    fr.attach(&fin);
    fr.setOptions(options);

    typedef std::vector<osg::Node*> NodeList;
    NodeList loaded;

    // Keep reading nodes until the stream is exhausted, skipping over any
    // field or block the reader does not recognize.
    while(!fr.eof())
    {
        Node *node = fr.readNode();
        if (node)
        {
            loaded.push_back(node);
        }
        else
        {
            fr.advanceOverCurrentFieldOrBlock();
        }
    }

    if (loaded.empty()) return ReadResult("No data loaded");
    if (loaded.size()==1) return loaded.front();

    // Several top-level nodes: collect them under one group.
    Group* group = new Group;
    group->setName("import group");
    for(NodeList::iterator itr=loaded.begin(); itr!=loaded.end(); ++itr)
    {
        group->addChild(*itr);
    }
    return group;
}
// Load every file in fileList as a scene node. Returns NULL when nothing
// loads, the single node when exactly one file loads, or a Group holding
// all loaded nodes otherwise.
Node* osgDB::readNodeFiles(std::vector<std::string>& fileList,const Options* options)
{
    typedef std::vector<osg::Node*> NodeList;
    NodeList loaded;

    for(std::vector<std::string>::iterator itr=fileList.begin();
        itr!=fileList.end();
        ++itr)
    {
        osg::Node *node = osgDB::readNodeFile( *itr , options );
        if( node == (osg::Node *)0L ) continue;

        // Name unnamed nodes after their source file.
        if (node->getName().empty()) node->setName( *itr );
        loaded.push_back(node);
    }

    if (loaded.empty()) return NULL;
    if (loaded.size()==1) return loaded.front();

    // Several files loaded: aggregate them under a common group.
    osg::Group* group = new osg::Group;
    for(NodeList::iterator itr=loaded.begin();
        itr!=loaded.end();
        ++itr)
    {
        group->addChild(*itr);
    }
    return group;
}
// Check whether a binary tree is symmetric around its root by walking the
// left and right subtrees breadth-first in mirrored order and comparing the
// paired nodes.
bool IsMirror(Node *root)
{
    // an empty tree is mirrored
    if (!root)
    {
        return true;
    }

    typedef list<Node *> NodeList;
    NodeList leftSide;
    NodeList rightSide;
    leftSide.push_back(root->left);
    rightSide.push_back(root->right);

    while (!leftSide.empty() && !rightSide.empty())
    {
        Node *a = leftSide.front();
        leftSide.pop_front();
        Node *b = rightSide.front();
        rightSide.pop_front();

        // Two missing nodes are symmetric; one missing or differing values
        // break the mirror.
        if (!a && !b)
        {
            continue;
        }
        if (!a || !b || a->value != b->value)
        {
            return false;
        }

        // Enqueue the left subtree left-to-right and the right subtree
        // right-to-left so symmetric positions line up pairwise.
        leftSide.push_back(a->left);
        leftSide.push_back(a->right);
        rightSide.push_back(b->right);
        rightSide.push_back(b->left);
    }

    // Both lists should be empty, otherwise this is not a mirrored binary tree.
    return leftSide.empty() && rightSide.empty();
}