// Parse the OTB-style byte stream that follows the leading Identifier into a
// tree of Nodes rooted at |root|, and return that root. Each node is framed
// by START/END marker bytes; an ESCAPE byte makes the following byte part of
// the node's property data instead of a marker. Throws InvalidOTBFormat on
// malformed input: bad opening byte, truncated stream, or unbalanced
// START/END pairs.
// NOTE(review): parseStack stores &child into an element of
// currentNode.children while further children may still be appended — this
// is only safe if children is a pointer-stable container (e.g. std::list);
// confirm the container type.
const Node& Loader::parseTree() {
    // Skip the file-format identifier at the front of the buffer.
    auto it = fileContents.begin() + sizeof(Identifier);
    if (static_cast<uint8_t>(*it) != Node::START) {
        throw InvalidOTBFormat{};
    }
    // The byte after START is the root's type; its properties start after it.
    root.type = *(++it);
    root.propsBegin = ++it;
    NodeStack parseStack;
    parseStack.push(&root);
    for (; it != fileContents.end(); ++it) {
        switch(static_cast<uint8_t>(*it)) {
            case Node::START: {
                auto& currentNode = getCurrentNode(parseStack);
                // The first child marks the end of the parent's property bytes.
                if (currentNode.children.empty()) {
                    currentNode.propsEnd = it;
                }
                currentNode.children.emplace_back();
                auto& child = currentNode.children.back();
                // A START marker must be followed by the child's type byte.
                if (++it == fileContents.end()) {
                    throw InvalidOTBFormat{};
                }
                child.type = *it;
                child.propsBegin = it + sizeof(Node::type);
                parseStack.push(&child);
                break;
            }
            case Node::END: {
                auto& currentNode = getCurrentNode(parseStack);
                // A childless node's properties run right up to its END byte.
                if (currentNode.children.empty()) {
                    currentNode.propsEnd = it;
                }
                parseStack.pop();
                break;
            }
            case Node::ESCAPE: {
                // Consume the escaped byte so the loop does not interpret it
                // as a marker; a trailing ESCAPE with nothing after it is
                // malformed.
                if (++it == fileContents.end()) {
                    throw InvalidOTBFormat{};
                }
                break;
            }
            default: {
                // Ordinary property byte — nothing to do here.
                break;
            }
        }
    }
    // Every START must have been matched by an END (root included).
    if (!parseStack.empty()) {
        throw InvalidOTBFormat{};
    }
    return root;
}
void Network::topologicalSortExecutionNetwork() { // Note: we don't need to do a full-fledged topological sort here, as we do not // have any DAG, we actually have a dependency tree. This way we can just do a // depth-first search, with ref-counting to account for diamond shapes in the tree. // this is similar to the wavefront design pattern used in parallelization // Using DFS here also has the advantage that it makes as much as possible use // of cache locality // 1- get all the nodes and count the number of refs they have NodeVector nodes = depthFirstSearch(_executionNetworkRoot); map<NetworkNode*, int> refs; // this initialization should be useless, but let's do it anyway for clarity for (int i=0; i<(int)nodes.size(); i++) refs[nodes[i]] = 0; // count the number of refs for each node for (int i=0; i<(int)nodes.size(); i++) { const NodeVector& children = nodes[i]->children(); for (int j=0; j<(int)children.size(); j++) { refs[children[j]] += 1; } } // 2- do DFS again, manually this time and only visit node which have no refs anymore _toposortedNetwork.clear(); NodeStack toVisit; toVisit.push(_executionNetworkRoot); refs[_executionNetworkRoot] = 1; while (!toVisit.empty()) { NetworkNode* currentNode = toVisit.top(); toVisit.pop(); if (--refs[currentNode] == 0) { _toposortedNetwork.push_back(currentNode->algorithm()); // keep this node, it is good const NodeVector& children = currentNode->children(); for (int i=0; i<(int)children.size(); i++) { toVisit.push(children[i]); } } } E_DEBUG(ENetwork, "-------------------------------------------------------------------------------------------"); for (int i=0; i<(int)_toposortedNetwork.size(); i++) { E_DEBUG_NONL(ENetwork, " → " << _toposortedNetwork[i]->name()); } E_DEBUG(ENetwork, ""); // for adding a newline E_DEBUG(ENetwork, "-------------------------------------------------------------------------------------------"); }
/*
 * Prepare |pn| to be mutated in place into a new kind of node. Recycle all
 * |pn|'s recyclable children (but not |pn| itself!), and disconnect it from
 * metadata structures (the function box tree).
 */
void ParseNodeAllocator::prepareNodeForMutation(ParseNode *pn) {
    /* Nullary nodes have no children to recycle. */
    if (pn->isArity(PN_NULLARY))
        return;

    /* Seed a work stack with |pn|'s children; |pn| itself is never freed. */
    NodeStack work;
    PushNodeChildren(pn, &work);

    /*
     * Drain the stack: expand each node's children onto it, then free the
     * node itself when PushNodeChildren reports that it is safe to do so.
     */
    while (!work.empty()) {
        ParseNode *node = work.pop();
        if (PushNodeChildren(node, &work))
            freeNode(node);
    }
}
/*
 * Return the nodes in the subtree |pn| to the parser's free node list, for
 * reallocation.
 */
ParseNode *
ParseNodeAllocator::freeTree(ParseNode *pn)
{
    if (!pn)
        return nullptr;

    /* Preserve the sibling link so the caller can keep walking the list. */
    ParseNode *next = pn->pn_next;

    /*
     * Iterative tree walk: free each node right after queuing its children,
     * pulling the next node from the work stack until it is exhausted.
     */
    NodeStack work;
    ParseNode *node = pn;
    while (true) {
        if (PushNodeChildren(node, &work))
            freeNode(node);
        if (work.empty())
            break;
        node = work.pop();
    }

    return next;
}
/*
 * Prepare |pn| to be mutated in place into a new kind of node. Recycle all
 * |pn|'s recyclable children (but not |pn| itself!), and disconnect it from
 * metadata structures (the function box tree).
 */
void ParseNodeAllocator::prepareNodeForMutation(ParseNode* pn) {
    if (!pn->isArity(PN_NULLARY)) {
        // Seed the work stack with |pn|'s children only — |pn| itself must
        // survive so the caller can mutate it in place.
        NodeStack work;
        PushNodeChildren(pn, &work);

        // Expand each pending node's children onto the stack, recycling the
        // node itself whenever PushNodeChildren reports it as recyclable.
        while (!work.empty()) {
            ParseNode* node = work.pop();
            if (PushNodeChildren(node, &work) == PushResult::Recyclable)
                freeNode(node);
        }
    }
}
bool Graph::findPath(int startPosition, NodeStack & s) //finds path between node teleported to and the outside world //recurses through graph, keeping track of nodes that were visted //and direction that the adventurer traveled in. Nodes already visited //or paths resulting in not being able to get out of the maze can //simply be popped off the stack. { Node * start = getNode(startPosition); if(start -> getVisited()) { return false; } start -> setVisited(true); s.push(start); if(start -> getId() == 0) { return true; } if(s.empty()) { return false; } Node ** paths = start -> getPaths(); for(int i = 0; i < 4; i ++) { if(paths[i] != 0) { start -> setDirection(i); if(findPath(paths[i] -> getId(), s)) { return true; } } } s.pop(); return false; }
// Reassemble an operator node of kind |k| from the top |nchildren| operands
// on the pickler stack (plus the operator itself when |k| is parameterized).
Node PicklerPrivate::fromCaseOperator(Kind k, uint32_t nchildren) {
    kind::MetaKind meta = metaKindOf(k);
    // Parameterized operators carry one extra stack entry: the operator.
    const bool hasOperator = (meta == kind::metakind::PARAMETERIZED);
    uint32_t remaining = nchildren + (hasOperator ? 1 : 0);

    // Operands come off the main stack in reverse order; stage them on an
    // auxiliary stack to restore their original order.
    NodeStack reversed;
    for (; remaining > 0; --remaining) {
        Assert(!d_stack.empty());
        reversed.push(d_stack.top());
        d_stack.pop();
    }

    // Rebuild the node, feeding operands back in their original order.
    NodeBuilder<> builder(d_nm, k);
    while (!reversed.empty()) {
        builder << reversed.top();
        reversed.pop();
    }
    return builder;
}
/*
 * Prepare |pn| to be mutated in place into a new kind of node. Recycle all
 * |pn|'s recyclable children (but not |pn| itself!), and disconnect it from
 * metadata structures (the function box tree).
 */
void ParseNodeAllocator::prepareNodeForMutation(ParseNode *pn) {
    /* Nullary nodes carry no children and no funbox linkage: nothing to do. */
    if (pn->isArity(PN_NULLARY))
        return;

    if (pn->isArity(PN_FUNC)) {
        /*
         * Since this node could be turned into anything, we can't ensure it
         * won't be subsequently recycled, so we must disconnect it from the
         * funbox tree entirely.
         *
         * Note that pn_funbox may legitimately be NULL. functionDef applies
         * MakeDefIntoUse to definition nodes, which can come from prior
         * iterations of the big loop in compileScript. In such cases, the
         * defn nodes have been visited by the recycler (but not actually
         * recycled!), and their funbox pointers cleared. But it's fine to
         * mutate them into uses of some new definition.
         */
        if (pn->pn_funbox)
            pn->pn_funbox->node = NULL;
    }

    /* Seed the work stack with |pn|'s children (never |pn| itself). */
    NodeStack pending;
    PushNodeChildren(pn, &pending);

    /*
     * For each node pulled off the stack, queue its children and free the
     * node itself when PushNodeChildren says we can.
     */
    while (!pending.empty()) {
        ParseNode *node = pending.pop();
        if (PushNodeChildren(node, &pending))
            freeNode(node);
    }
}
// Compute partial-dependence predictions for a fitted GBM: for each row of
// |covariates| (values of the variables listed in |whichvar|), walk every
// fitted tree; splits on variables NOT in |whichvar| are averaged over by
// descending both children with weights proportional to their observation
// weights. Returns a numeric vector of accumulated predictions per row,
// starting from |init_func_est|. Throws InvalidArgument when the covariate
// matrix's column count does not match |whichvar|.
SEXP gbm_plot(
    SEXP covariates,         // vector or matrix of points to make predictions
    SEXP whichvar,           // index of which var cols of X are
    SEXP num_trees,          // number of trees to use
    SEXP init_func_est,      // initial value
    SEXP fitted_trees,       // tree list object
    SEXP categorical_splits, // categorical split list object
    SEXP var_types           // vector of variable types
) {
  BEGIN_RCPP
  int tree_num = 0;
  int obs_num = 0;
  // class_num is never advanced below, so only the single-class layout is
  // exercised (the output index reduces to obs_num).
  int class_num = 0;
  const Rcpp::NumericMatrix kCovarMat(covariates);
  const int kNumRows = kCovarMat.nrow();
  const int kNumTrees = Rcpp::as<int>(num_trees);
  const Rcpp::IntegerVector kWhichVars(whichvar);
  const Rcpp::GenericVector kFittedTrees(fitted_trees);
  const Rcpp::GenericVector kSplits(categorical_splits);
  const Rcpp::IntegerVector kVarType(var_types);
  // Predictions start at the model's initial estimate and accumulate each
  // tree's weighted terminal-node contributions.
  Rcpp::NumericVector predicted_func(kNumRows,
                                     Rcpp::as<double>(init_func_est));
  // One covariate column is required per selected variable.
  if (kCovarMat.ncol() != kWhichVars.size()) {
    throw gbm_exception::InvalidArgument("shape mismatch");
  }
  for (tree_num = 0; tree_num < kNumTrees; tree_num++) {
    // Unpack the flattened representation of this tree.
    const Rcpp::GenericVector kThisTree = kFittedTrees[tree_num];
    const Rcpp::IntegerVector kThisSplitVar = kThisTree[0];
    const Rcpp::NumericVector kThisSplitCode = kThisTree[1];
    const Rcpp::IntegerVector kThisLeftNode = kThisTree[2];
    const Rcpp::IntegerVector kThisRightNode = kThisTree[3];
    const Rcpp::IntegerVector kThisMissingNode = kThisTree[4];
    // NOTE(review): element 5 of the tree list is intentionally skipped and
    // element 6 is used as per-node weights — confirm against the tree
    // serialization layout.
    const Rcpp::NumericVector kThisWeight = kThisTree[6];
    for (obs_num = 0; obs_num < kNumRows; obs_num++) {
      // Walk the tree from the root (node 0, weight 1.0); the weight is
      // split across both children whenever a node tests a variable we are
      // averaging over.
      NodeStack stack;
      stack.push(0, 1.0);
      while (!stack.empty()) {
        const std::pair<int, double> top = stack.pop();
        int iCurrentNode = top.first;
        const double kWeight = top.second;
        if (kThisSplitVar[iCurrentNode] == -1)  // terminal node
        {
          // Terminal nodes store their prediction in the split-code slot.
          predicted_func[class_num * kNumRows + obs_num] +=
              kWeight * kThisSplitCode[iCurrentNode];
        } else  // non-terminal node
        {
          // is this a split variable that interests me?
          const Rcpp::IntegerVector::const_iterator found =
              std::find(kWhichVars.begin(), kWhichVars.end(),
                        kThisSplitVar[iCurrentNode]);
          if (found != kWhichVars.end()) {
            const int kPredVar = found - kWhichVars.begin();
            const double kXValue = kCovarMat(obs_num, kPredVar);
            // missing?
            if (ISNA(kXValue)) {
              stack.push(kThisMissingNode[iCurrentNode], kWeight);
            }
            // continuous?
            else if (kVarType[kThisSplitVar[iCurrentNode]] == 0) {
              if (kXValue < kThisSplitCode[iCurrentNode]) {
                stack.push(kThisLeftNode[iCurrentNode], kWeight);
              } else {
                stack.push(kThisRightNode[iCurrentNode], kWeight);
              }
            } else  // categorical
            {
              // The split code indexes the categorical-split table; each
              // entry maps a level to left (-1), right (1), or neither.
              const Rcpp::IntegerVector kCatSplits =
                  kSplits[kThisSplitCode[iCurrentNode]];
              const int kCatSplitIndicator = kCatSplits[kXValue];
              if (kCatSplitIndicator == -1) {
                stack.push(kThisLeftNode[iCurrentNode], kWeight);
              } else if (kCatSplitIndicator == 1) {
                stack.push(kThisRightNode[iCurrentNode], kWeight);
              } else  // handle unused level
              {
                stack.push(kThisMissingNode[iCurrentNode], kWeight);
              }
            }
          }  // iPredVar != -1
          else  // not interested in this split, average left and right
          {
            // Descend both children, apportioning the current weight by the
            // observation weight of each subtree.
            const int kRight = kThisRightNode[iCurrentNode];
            const int kLeft = kThisLeftNode[iCurrentNode];
            const double kRightWeight = kThisWeight[kRight];
            const double kLeftWeight = kThisWeight[kLeft];
            stack.push(kRight,
                       kWeight * kRightWeight / (kRightWeight + kLeftWeight));
            stack.push(kLeft,
                       kWeight * kLeftWeight / (kRightWeight + kLeftWeight));
          }
        }  // non-terminal node
      }    // while(cStackNodes > 0)
    }      // iObs
  }        // iTree
  return Rcpp::wrap(predicted_func);
  END_RCPP
}  // gbm_plot