bool IsMirror(Node *root)
{
    typedef list<Node *> NodeList;
    NodeList leftList;
    NodeList rightList;

    if (!root)
    {
        // an empty tree is mirrored
        return true;
    }

    leftList.push_back(root->left);
    rightList.push_back(root->right);

    // BFS traversal: compare the values of the front nodes of both lists,
    // then push their children (in mirrored order for the right subtree).
    while (!leftList.empty() && !rightList.empty())
    {
        Node *left = leftList.front();
        leftList.pop_front();
        Node *right = rightList.front();
        rightList.pop_front();

        if (!left && !right)
        {
            continue;
        }
        else if (!left || !right)
        {
            return false;
        }

        if (left->value != right->value)
        {
            return false;
        }

        leftList.push_back(left->left);
        leftList.push_back(left->right);

        // the insert order is reversed in right sub-tree
        rightList.push_back(right->right);
        rightList.push_back(right->left);
    }

    // Both lists should be empty, otherwise this is not a mirrored binary tree.
    return leftList.empty() && rightList.empty();
}
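
A minimal usage sketch for IsMirror, assuming a plain Node struct with value/left/right members and that std::list is visible as list (both are assumptions; the original Node definition and using-declarations are not shown in the snippet):

struct Node
{
    int value;
    Node *left;
    Node *right;
};

int main()
{
    // Build a mirrored tree:
    //        1
    //      /   \
    //     2     2
    //      \   /
    //       3 3
    Node leftLeaf   = {3, 0, 0};
    Node rightLeaf  = {3, 0, 0};
    Node leftChild  = {2, 0, &leftLeaf};   // 3 hangs on the right
    Node rightChild = {2, &rightLeaf, 0};  // 3 hangs on the left
    Node root       = {1, &leftChild, &rightChild};

    return IsMirror(&root) ? 0 : 1;        // mirrored tree, so IsMirror returns true
}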
Example #2
File: ReadFile.cpp  Project: joevandyk/osg
Node* osgDB::readNodeFiles(std::vector<std::string>& commandLine,const ReaderWriter::Options* options)
{
    typedef std::vector<osg::Node*> NodeList;
    NodeList nodeList;

    // note currently doesn't delete the loaded file entries from the command line yet...

    for(std::vector<std::string>::iterator itr=commandLine.begin();
        itr!=commandLine.end();
        ++itr)
    {
        if ((*itr)[0]!='-')
        {
            // not an option so assume string is a filename.
            osg::Node *node = osgDB::readNodeFile( *itr , options );

            if( node != (osg::Node *)0L )
            {
                if (node->getName().empty()) node->setName( *itr );
                nodeList.push_back(node);
            }

        }
    }
    
    if (nodeList.empty())
    {
        return NULL;
    }

    if (nodeList.size()==1)
    {
        return nodeList.front();
    }
    else  // size >1
    {
        osg::Group* group = new osg::Group;
        for(NodeList::iterator itr=nodeList.begin();
            itr!=nodeList.end();
            ++itr)
        {
            group->addChild(*itr);
        }

        return group;
    }
    
}
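
A brief usage sketch for this overload (the file names are placeholders from the standard OpenSceneGraph sample data; error handling kept minimal):

#include <osgDB/ReadFile>

std::vector<std::string> files;
files.push_back("cow.osg");
files.push_back("cessna.osg");

// Returns the single loaded node, a new osg::Group wrapping several nodes,
// or NULL if nothing could be loaded; hold it in a ref_ptr right away.
osg::ref_ptr<osg::Node> scene = osgDB::readNodeFiles(files, /*options=*/0);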
Example #3
        virtual ReadResult readNode(std::istream& fin, const Options* options) const
        {
            loadWrappers();

            fin.imbue(std::locale::classic());

            Input fr;
            fr.attach(&fin);
            fr.setOptions(options);

            typedef std::vector<osg::Node*> NodeList;
            NodeList nodeList;

            // load all nodes in file, placing them in a group.
            while(!fr.eof())
            {
                Node *node = fr.readNode();
                if (node) nodeList.push_back(node);
                else fr.advanceOverCurrentFieldOrBlock();
            }

            if  (nodeList.empty())
            {
                return ReadResult("No data loaded");
            }
            else if (nodeList.size()==1)
            {
                return nodeList.front();
            }
            else
            {
                Group* group = new Group;
                group->setName("import group");
                for(NodeList::iterator itr=nodeList.begin();
                    itr!=nodeList.end();
                    ++itr)
                {
                    group->addChild(*itr);
                }
                return group;
            }

        }
Example #4
Node* osgDB::readNodeFiles(std::vector<std::string>& fileList,const Options* options)
{
    typedef std::vector<osg::Node*> NodeList;
    NodeList nodeList;

    for(std::vector<std::string>::iterator itr=fileList.begin();
            itr!=fileList.end();
            ++itr)
    {
        osg::Node *node = osgDB::readNodeFile( *itr , options );

        if( node != (osg::Node *)0L )
        {
            if (node->getName().empty()) node->setName( *itr );
            nodeList.push_back(node);
        }

    }

    if (nodeList.empty())
    {
        return NULL;
    }

    if (nodeList.size()==1)
    {
        return nodeList.front();
    }
    else  // size >1
    {
        osg::Group* group = new osg::Group;
        for(NodeList::iterator itr=nodeList.begin();
                itr!=nodeList.end();
                ++itr)
        {
            group->addChild(*itr);
        }

        return group;
    }

}
Example #5
Node* osgDB::readNodeFiles(osg::ArgumentParser& arguments,const Options* options)
{

    typedef std::vector< osg::ref_ptr<osg::Node> > NodeList;
    NodeList nodeList;

    std::string filename;

    while (arguments.read("--file-cache",filename))
    {
        osgDB::Registry::instance()->setFileCache(new osgDB::FileCache(filename));
    }

    while (arguments.read("--image",filename))
    {
        osg::ref_ptr<osg::Image> image = readImageFile(filename.c_str(), options);
        if (image.valid())
        {
            osg::Geode* geode = osg::createGeodeForImage(image.get());

            if (image->isImageTranslucent())
            {
                OSG_INFO<<"Image "<<image->getFileName()<<" is translucent; setting up blending."<<std::endl;
                geode->getOrCreateStateSet()->setMode(GL_BLEND, osg::StateAttribute::ON);
                geode->getOrCreateStateSet()->setRenderingHint(osg::StateSet::TRANSPARENT_BIN);
            }

            nodeList.push_back(geode);
        }
    }

    while (arguments.read("--movie",filename))
    {
        osg::ref_ptr<osg::Image> image = readImageFile(filename.c_str(), options);
        osg::ref_ptr<osg::ImageStream> imageStream = dynamic_cast<osg::ImageStream*>(image.get());
        if (imageStream.valid())
        {
            bool flip = image->getOrigin()==osg::Image::TOP_LEFT;

            // start the stream playing.
            imageStream->play();

            osg::ref_ptr<osg::Geometry> pictureQuad = 0;

            bool useTextureRectangle = true;
            if (useTextureRectangle)
            {
                pictureQuad = osg::createTexturedQuadGeometry(osg::Vec3(0.0f,0.0f,0.0f),
                              osg::Vec3(image->s(),0.0f,0.0f),
                              osg::Vec3(0.0f,0.0f,image->t()),
                              0.0f, flip ? image->t() : 0.0, image->s(), flip ? 0.0 : image->t());

                pictureQuad->getOrCreateStateSet()->setTextureAttributeAndModes(0,
                        new osg::TextureRectangle(image.get()),
                        osg::StateAttribute::ON);
            }
            else
            {
                pictureQuad = osg::createTexturedQuadGeometry(osg::Vec3(0.0f,0.0f,0.0f),
                              osg::Vec3(image->s(),0.0f,0.0f),
                              osg::Vec3(0.0f,0.0f,image->t()),
                              0.0f, flip ? 1.0f : 0.0f , 1.0f, flip ? 0.0f : 1.0f);

                pictureQuad->getOrCreateStateSet()->setTextureAttributeAndModes(0,
                        new osg::Texture2D(image.get()),
                        osg::StateAttribute::ON);
            }

            if (pictureQuad.valid())
            {
                osg::ref_ptr<osg::Geode> geode = new osg::Geode;
                geode->addDrawable(pictureQuad.get());
                nodeList.push_back(geode.get());

            }
        }
        else if (image.valid())
        {
            nodeList.push_back(osg::createGeodeForImage(image.get()));
        }
    }

    while (arguments.read("--dem",filename))
    {
        osg::HeightField* hf = readHeightFieldFile(filename.c_str(), options);
        if (hf)
        {
            osg::Geode* geode = new osg::Geode;
            geode->addDrawable(new osg::ShapeDrawable(hf));
            nodeList.push_back(geode);
        }
    }

    // note currently doesn't delete the loaded file entries from the command line yet...
    for(int pos=1; pos<arguments.argc(); ++pos)
    {
        if (!arguments.isOption(pos))
        {
            // not an option so assume string is a filename.
            osg::Node *node = osgDB::readNodeFile( arguments[pos], options);

            if(node)
            {
                if (node->getName().empty()) node->setName( arguments[pos] );
                nodeList.push_back(node);
            }

        }
    }

    if (nodeList.empty())
    {
        return NULL;
    }

    if (nodeList.size()==1)
    {
        return nodeList.front().release();
    }
    else  // size >1
    {
        osg::Group* group = new osg::Group;
        for(NodeList::iterator itr=nodeList.begin();
                itr!=nodeList.end();
                ++itr)
        {
            group->addChild((*itr).get());
        }

        return group;
    }

}
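A typical viewer-style caller for the ArgumentParser overload, sketched after the standard osgviewer pattern (the main function itself is hypothetical):

#include <osgDB/ReadFile>
#include <osgViewer/Viewer>

int main(int argc, char** argv)
{
    osg::ArgumentParser arguments(&argc, argv);

    // Consumes --file-cache/--image/--movie/--dem options plus any remaining
    // non-option arguments and returns one node, or a group of everything loaded.
    osg::ref_ptr<osg::Node> loadedModel = osgDB::readNodeFiles(arguments);
    if (!loadedModel.valid()) return 1;

    osgViewer::Viewer viewer(arguments);
    viewer.setSceneData(loadedModel.get());
    return viewer.run();
}
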
osg::ref_ptr<osg::Node> p3d::readShowFiles(osg::ArgumentParser& arguments,const osgDB::ReaderWriter::Options* options)
{
    osg::ref_ptr<osgDB::Options> local_options = createOptions(options);
    local_options->setOptionString("main");

    typedef std::vector< osg::ref_ptr<osg::Node> > NodeList;
    NodeList nodeList;

    std::string filename;
    while (arguments.read("--image",filename))
    {
        osg::ref_ptr<osg::Image> image = readImageFile(filename.c_str(), local_options.get());
        if (image.valid()) nodeList.push_back(osg::createGeodeForImage(image.get()));
    }

    while (arguments.read("--movie",filename))
    {
        osg::ref_ptr<osg::Image> image = readImageFile(filename.c_str(), local_options.get());
        osg::ref_ptr<osg::ImageStream> imageStream = dynamic_cast<osg::ImageStream*>(image.get());
        if (imageStream.valid())
        {
            imageStream->play();
            nodeList.push_back(osg::createGeodeForImage(imageStream.get()));
        }
    }

    while (arguments.read("--dem",filename))
    {
        osg::HeightField* hf = readHeightFieldFile(filename.c_str(), local_options.get());
        if (hf)
        {
            osg::Geode* geode = new osg::Geode;
            geode->addDrawable(new osg::ShapeDrawable(hf));
            nodeList.push_back(geode);
        }
    }

    // note currently doesn't delete the loaded file entries from the command line yet...
    for(int pos=1;pos<arguments.argc();++pos)
    {
        if (!arguments.isOption(pos))
        {
            // not an option so assume string is a filename.
            osg::Node *node = osgDB::readNodeFile( arguments[pos], local_options.get());

            if(node)
            {
                if (node->getName().empty()) node->setName( arguments[pos] );
                nodeList.push_back(node);
            }
        }
    }
    
    if (nodeList.empty())
    {
        return NULL;
    }
    
    osg::ref_ptr<osg::Node> root;

    if (nodeList.size()==1)
    {
        root = nodeList.front().get();
    }
    else  // size >1
    {
        
        osg::Switch* sw = new osg::Switch;
        for(NodeList::iterator itr=nodeList.begin();
            itr!=nodeList.end();
            ++itr)
        {
            sw->addChild((*itr).get());
        }
        sw->setSingleChildOn(0);
        
        sw->setEventCallback(new p3d::ShowEventHandler());

        root = sw;
    }

    if (root.valid())
    {
        osg::notify(osg::INFO)<<"Got node, now adding callback"<<std::endl;
    
        AddVolumeEditingCallbackVisitor avecv;
        root->accept(avecv);
    }

    return root;
}
Example #7
  //
  // FindLoops
  //
  // Find loops and build loop forest using Havlak's algorithm, which
  // is derived from Tarjan. Variable names and step numbering have
  // been chosen to be identical to the nomenclature in Havlak's
  // paper (which is similar to the one used by Tarjan).
  //
  void FindLoops() {
    if (!CFG_->GetStartBasicBlock()) return;

    int                size = CFG_->GetNumNodes();

    IntSetVector       non_back_preds(size);
    IntListVector      back_preds(size);
    IntVector          header(size);
    CharVector         type(size);
    IntVector          last(size);
    NodeVector         nodes(size);
    BasicBlockMap      number;

    // Step a:
    //   - initialize all nodes as unvisited.
    //   - depth-first traversal and numbering.
    //   - unreached BB's are marked as dead.
    //
    for (MaoCFG::NodeMap::iterator bb_iter =
           CFG_->GetBasicBlocks()->begin();
         bb_iter != CFG_->GetBasicBlocks()->end(); ++bb_iter) {
      number[(*bb_iter).second] = kUnvisited;
    }

    DFS(CFG_->GetStartBasicBlock(), &nodes, &number, &last, 0);

    // Step b:
    //   - iterate over all nodes.
    //
    //   A backedge comes from a descendant in the DFS tree, and non-backedges
    //   from non-descendants (following Tarjan).
    //
    //   - check incoming edges 'v' and add them to either
    //     - the list of backedges (back_preds) or
    //     - the list of non-backedges (non_back_preds)
    //
    for (int w = 0; w < size; w++) {
      header[w] = 0;
      type[w] = BB_NONHEADER;

      BasicBlock *node_w = nodes[w].bb();
      if (!node_w) {
        type[w] = BB_DEAD;
        continue;  // dead BB
      }

      if (node_w->GetNumPred()) {
        for (BasicBlockIter inedges = node_w->in_edges()->begin();
             inedges != node_w->in_edges()->end(); ++inedges) {
          BasicBlock     *node_v = *inedges;

          int v = number[ node_v ];
          if (v == kUnvisited) continue;  // dead node

          if (IsAncestor(w, v, &last))
            back_preds[w].push_back(v);
          else
            non_back_preds[w].insert(v);
        }
      }
    }

    // Start node is root of all other loops.
    header[0] = 0;

    // Step c:
    //
    // The outer loop, unchanged from Tarjan. It does nothing except
    // for those nodes which are the destinations of backedges.
    // For a header node w, we chase backward from the sources of the
    // backedges adding nodes to the set P, representing the body of
    // the loop headed by w.
    //
    // By running through the nodes in reverse of the DFST preorder,
    // we ensure that inner loop headers will be processed before the
    // headers for surrounding loops.
    //
    for (int w = size-1; w >= 0; w--) {
      NodeList    node_pool;  // this is 'P' in Havlak's paper
      BasicBlock *node_w = nodes[w].bb();
      if (!node_w) continue;  // dead BB

      // Step d:
      IntList::iterator back_pred_iter  = back_preds[w].begin();
      IntList::iterator back_pred_end   = back_preds[w].end();
      for (; back_pred_iter != back_pred_end; back_pred_iter++) {
        int v = *back_pred_iter;
        if (v != w)
          node_pool.push_back(nodes[v].FindSet());
        else
          type[w] = BB_SELF;
      }

      // Copy node_pool to worklist.
      //
      NodeList worklist;
      NodeList::iterator niter  = node_pool.begin();
      NodeList::iterator nend   = node_pool.end();
      for (;  niter != nend; ++niter)
        worklist.push_back(*niter);

      if (!node_pool.empty())
        type[w] = BB_REDUCIBLE;

      // work the list...
      //
      while (!worklist.empty()) {
        UnionFindNode x = *worklist.front();
        worklist.pop_front();

        // Step e:
        //
        // Step e represents the main difference from Tarjan's method:
        // chasing upwards from the sources of a node w's backedges. If
        // there is a node y' that is not a descendant of w, w is marked
        // as the header of an irreducible loop; there is another entry
        // into this loop that avoids w.
        //

        // The algorithm has degenerated. Break and
        // return in this case.
        //
        size_t non_back_size = non_back_preds[x.dfs_number()].size();
        if (non_back_size > kMaxNonBackPreds) {
          lsg_->KillAll();
          return;
        }

        IntSet::iterator non_back_pred_iter =
          non_back_preds[x.dfs_number()].begin();
        IntSet::iterator non_back_pred_end  =
          non_back_preds[x.dfs_number()].end();
        for (; non_back_pred_iter != non_back_pred_end; non_back_pred_iter++) {
          UnionFindNode  y     = nodes[*non_back_pred_iter];
          UnionFindNode *ydash = y.FindSet();

          if (!IsAncestor(w, ydash->dfs_number(), &last)) {
            type[w] = BB_IRREDUCIBLE;
            non_back_preds[w].insert(ydash->dfs_number());
          } else {
            if (ydash->dfs_number() != w) {
              NodeList::iterator nfind = find(node_pool.begin(),
                                              node_pool.end(), ydash);
              if (nfind == node_pool.end()) {
                worklist.push_back(ydash);
                node_pool.push_back(ydash);
              }
            }
          }
        }
      }

      // Collapse/Unionize nodes in a SCC to a single node
      // For every SCC found, create a loop descriptor and link it in.
      //
      if (!node_pool.empty() || (type[w] == BB_SELF)) {
        SimpleLoop* loop = lsg_->CreateNewLoop();

        // At this point, one can set attributes to the loop, such as:
        //
        // the bottom node:
        //    IntList::iterator iter  = back_preds[w].begin();
        //    loop bottom is: nodes[*backp_iter].node);
        //
        // the number of backedges:
        //    back_preds[w].size()
        //
        // whether this loop is reducible:
        //    type[w] != BB_IRREDUCIBLE
        //
        // TODO(rhundt): Define those interfaces in the Loop Forest.
        //
        nodes[w].set_loop(loop);

        for (niter = node_pool.begin(); niter != node_pool.end(); niter++) {
          UnionFindNode  *node = (*niter);

          // Add nodes to loop descriptor.
          header[node->dfs_number()] = w;
          node->Union(&nodes[w]);

          // Nested loops are not added, but linked together.
          if (node->loop())
            node->loop()->set_parent(loop);
          else
            loop->AddNode(node->bb());
        }

        lsg_->AddLoop(loop);
      }  // node_pool.size
    }  // Step c
  }  // FindLoops
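
FindLoops depends on an IsAncestor helper that decides ancestry purely from the DFS preorder numbers and the last[] array filled in during step a: the descendants of w occupy the contiguous number range [w, last[w]]. A minimal sketch consistent with the call sites above (the helper in the original benchmark may differ in detail):

  // True if w is an ancestor of v in the DFS spanning tree, using the
  // preorder-number / last-descendant trick set up in step a.
  bool IsAncestor(int w, int v, IntVector *last) {
    return (w <= v) && (v <= (*last)[w]);
  }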
Example #8
void launcher::Launcher::loadSceneFromFile(string fileName, string initialStateGroup)
{
    Engine& engine = Engine::getInstance();

    // FIXME should we validate task file against xml schema?
    auto& ffls = FileFolderLookupService::getInstance();
    string fname = ffls.lookupFile(fileName);
    LOG_DEBUG("Loading scene from file " << fname);
    // parse file
    Doc doc = Doc::fromFile(fname);
    xml::Node rootNode = doc.getRootElement();
    // read task parameters
    NodeList taskNodes = rootNode.xpath("/task");
    if( taskNodes.size() != 1 )
        THROW_INVALID_INPUT("Config file should contain one <task/> element");
    for(auto& taskNode: taskNodes)
    {
        int numberOfSnaps = lexical_cast<int>(taskNode["numberOfSnaps"]);
        int stepsPerSnap = lexical_cast<int>(taskNode["stepsPerSnap"]);
        engine.setNumberOfSnaps(numberOfSnaps);
        engine.setStepsPerSnap(stepsPerSnap);
    }

    NodeList loadPluginsList = rootNode.xpath("/task/system/loadPlugin");
    for (auto& plugin: loadPluginsList){
        engine.loadPlugin(plugin["name"]);
    }

    // reading system properties
    NodeList defaultContactCalculatorList = rootNode.xpath("/task/system/defaultContactCalculator");
    if( defaultContactCalculatorList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <defaultContactCalculator/> element");
    if( defaultContactCalculatorList.size() == 1 )
    {
        xml::Node defaultContactCalculator = defaultContactCalculatorList.front();
        string type = defaultContactCalculator["type"];
        if( engine.getContactCalculator(type) == NULL )
        {
            THROW_INVALID_INPUT("Unknown contact calculator requested: " + type);
        }
        engine.replaceDefaultContactCondition( 
                new ContactCondition(NULL, new StepPulseForm(-1, -1), engine.getContactCalculator(type) ) 
        );
        LOG_INFO("Default contact calculator set to: " + type);
        if (type == "AdhesionContactDestroyCalculator")
        {
            real adhesionThreshold = lexical_cast<real>(defaultContactCalculator["adhesionThreshold"]);
            engine.getContactCondition(0)->setConditionParam(adhesionThreshold);
        }
		if (type == "ClosedFractureContactCalculator")
        {
			NodeList areaNodes = defaultContactCalculator.getChildrenByName("area");
			if (areaNodes.size() != 1)
				THROW_INVALID_INPUT("Exactly one area element can be provided for ClosedFractureCalculator");
			Area* area = readArea(areaNodes[0]);
			(static_cast<gcm::ClosedFractureContactCalculator*>
				(engine.getContactCalculator(type)))->setFracArea(area);
        }
		if (type == "OpenFractureContactCalculator")
        {
			NodeList areaNodes = defaultContactCalculator.getChildrenByName("area");
			if (areaNodes.size() != 1)
				THROW_INVALID_INPUT("Exactly one area element can be provided for ClosedFractureCalculator");
			Area* area = readArea(areaNodes[0]);
			(static_cast<gcm::OpenFractureContactCalculator*>
				(engine.getContactCalculator(type)))->setFracArea(area);
        }
    }
    
    NodeList defaultBorderCalculatorList = rootNode.xpath("/task/system/defaultBorderCalculator");
    if( defaultBorderCalculatorList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <defaultBorderCalculator/> element");
    if( defaultBorderCalculatorList.size() == 1 )
    {
        xml::Node defaultBorderCalculator = defaultBorderCalculatorList.front();
        string type = defaultBorderCalculator["type"];
        if( engine.getBorderCalculator(type) == NULL )
        {
            THROW_INVALID_INPUT("Unknown border calculator requested: " + type);
        }
        engine.replaceDefaultBorderCondition( 
                new BorderCondition(NULL, new StepPulseForm(-1, -1), engine.getBorderCalculator(type) ) 
        );
        LOG_INFO("Default border calculator set to: " + type);
    }

    NodeList defaultRheoCalculatorList = rootNode.xpath("/task/system/defaultRheologyCalculator");
    if( defaultRheoCalculatorList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <defaultRheologyCalculator/> element");
    if( defaultRheoCalculatorList.size() == 1 )
    {
        xml::Node defaultRheoCalculator = defaultRheoCalculatorList.front();
        string type = defaultRheoCalculator["type"];
        engine.setDefaultRheologyCalculatorType(type);
        LOG_INFO("Default rheology calculator set to: " + type);
    }

    NodeList defaultFailureModelList = rootNode.xpath("/task/system/defaultFailureModel");
    if( defaultFailureModelList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <defaultFailureModelList/> element");
    if( defaultFailureModelList.size() == 1 )
    {
        xml::Node defaultFailureModel = defaultFailureModelList.front();
        string type = defaultFailureModel["type"];
        engine.setDefaultFailureModelType(type);
        LOG_INFO("Default failure model set to: " + type);
    }
    
    NodeList contactThresholdList = rootNode.xpath("/task/system/contactThreshold");
    if( contactThresholdList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <contactThreshold/> element");
    if( contactThresholdList.size() == 1 )
    {
        xml::Node contactThreshold = contactThresholdList.front();
        string measure = contactThreshold["measure"];
        real value = lexical_cast<real>(contactThreshold["value"]);
        if( measure == "avgH" )
        {
            engine.setContactThresholdType(CONTACT_THRESHOLD_BY_AVG_H);
            engine.setContactThresholdFactor(value);
        }
        else if( measure == "lambdaTau" )
        {
            engine.setContactThresholdType(CONTACT_THRESHOLD_BY_MAX_LT);
            engine.setContactThresholdFactor(value);
        }
        else if( measure == "abs" )
        {
            engine.setContactThresholdType(CONTACT_THRESHOLD_FIXED);
            engine.setContactThresholdFactor(value);
        }
        else
        {
            THROW_INVALID_INPUT("Unknown units of measure for <contactThreshold/>");
        }
    }

    NodeList collisionDetectorList = rootNode.xpath("/task/system/collisionDetector");
    if( collisionDetectorList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <collisionDetector/> element");
    if( collisionDetectorList.size() == 1 )
    {
        xml::Node collisionDetector = collisionDetectorList.front();
        string isStatic = collisionDetector["static"];
        if( isStatic == "true" )
        {
            engine.setCollisionDetectorStatic(true);
        }
        else if( isStatic == "false" )
        {
            engine.setCollisionDetectorStatic(false);
        }
    }
    
    NodeList meshMovementList = rootNode.xpath("/task/system/meshMovement");
    if( meshMovementList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <meshMovement/> element");
    if( meshMovementList.size() == 1 )
    {
        xml::Node meshMovement = meshMovementList.front();
        string meshMovementType = meshMovement["type"];
        if( meshMovementType == "none" )
        {
            engine.setMeshesMovable(false);
        }
    }
    
    NodeList timeStepList = rootNode.xpath("/task/system/timeStep");
    if( timeStepList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <timeStepList/> element");
    if( timeStepList.size() == 1 )
    {
        xml::Node timeStep = timeStepList.front();
        real value = lexical_cast<real>(timeStep["multiplier"]);
        engine.setTimeStepMultiplier(value);
        LOG_INFO("Using time step multiplier: " << value);
    }
    
    NodeList plasticityTypeList = rootNode.xpath("/task/system/plasticity");
    if( plasticityTypeList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <plasticity/> element");
    string plasticityType = PLASTICITY_TYPE_NONE;
    if( plasticityTypeList.size() == 1 )
    {
        plasticityType = plasticityTypeList.front()["type"];
    }
    
    NodeList failureModeList = rootNode.xpath("/task/system/failure");
    if( failureModeList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <failure/> element");
    string failureMode = FAILURE_MODE_DISCRETE;
    if( failureModeList.size() == 1 )
    {
        failureMode = failureModeList.front()["mode"];
    }

    string matrixDecompositionImplementation = "numerical";
    NodeList matrixDecompositionList = rootNode.xpath("/task/system/matrixDecomposition");
    if( matrixDecompositionList.size() > 1 )
        THROW_INVALID_INPUT("Config file can contain only one <matrixDecomposition/> element");
    if( matrixDecompositionList.size() == 1 )
    {
        xml::Node matrixDecomposition = matrixDecompositionList.front();
        matrixDecompositionImplementation = matrixDecomposition["implementation"];
    }

    LOG_INFO("Using matrix decomposition: " << matrixDecompositionImplementation);

    loadMaterialLibrary("materials");
    
    // reading materials
    loadMaterialsFromXml(rootNode.xpath("/task/materials/material"));


    AABB globalScene;

    // search for bodies
    NodeList bodyNodes = rootNode.xpath("/task/bodies/body");

    // prepare basic bodies parameters
    for(auto& bodyNode: bodyNodes)
    {
        string id = bodyNode.getAttributes()["id"];
        LOG_DEBUG("Loading body '" << id << "'");
        // create body instance
        Body* body = new Body(id);
        body->setRheologyCalculatorType(engine.getDefaultRheologyCalculatorType());
        // set rheology
        NodeList rheologyNodes = bodyNode.getChildrenByName("rheology");
        if (rheologyNodes.size() > 1)
            THROW_INVALID_INPUT("Only one rheology element allowed for body declaration");
        if (rheologyNodes.size()) {
            // We can do smth here when we have more than one rheology calculators
        }

        // preload meshes for dispatcher
        NodeList meshNodes = bodyNode.getChildrenByName("mesh");
        for(auto& meshNode: meshNodes)
        {         
            string type = meshNode["type"];

            LOG_INFO("Preparing mesh for body '" << id << "'");

            AABB localScene;
            int slicingDirection;
            int numberOfNodes;

            if (type == Geo2MeshLoader::MESH_TYPE)
                Geo2MeshLoader::getInstance().preLoadMesh(meshNode, localScene, slicingDirection, numberOfNodes);
            else if (type == Msh2MeshLoader::MESH_TYPE)
                Msh2MeshLoader::getInstance().preLoadMesh(meshNode, localScene, slicingDirection, numberOfNodes);
            else if (type == Ani3D2MeshLoader::MESH_TYPE)
                Ani3D2MeshLoader::getInstance().preLoadMesh(meshNode, localScene, slicingDirection, numberOfNodes);
            else if (type == Vtu2MeshLoader::MESH_TYPE)
                Vtu2MeshLoader::getInstance().preLoadMesh(meshNode, localScene, slicingDirection, numberOfNodes);
            else if (type == Vtu2MeshZoneLoader::MESH_TYPE)
                Vtu2MeshZoneLoader::getInstance().preLoadMesh(meshNode, localScene, slicingDirection, numberOfNodes);
            else if (type == BasicCubicMeshLoader::MESH_TYPE)
                BasicCubicMeshLoader::getInstance().preLoadMesh(meshNode, localScene, slicingDirection, numberOfNodes);
            else if (type == RectangularCutCubicMeshLoader::MESH_TYPE)
                RectangularCutCubicMeshLoader::getInstance().preLoadMesh(meshNode, localScene, slicingDirection, numberOfNodes);
            else if (type == MarkeredMeshGeoLoader::MESH_TYPE)
                MarkeredMeshGeoLoader::getInstance().preLoadMesh(meshNode, localScene, slicingDirection, numberOfNodes);
            else
                THROW_UNSUPPORTED("Specified mesh loader is not supported");

            // transform meshes
            NodeList transformNodes = bodyNode.getChildrenByName("transform");
            for(auto& transformNode: transformNodes)
            {
                string transformType = transformNode["type"];
                if ( transformType == "translate" )
                {
                    real x = lexical_cast<real>(transformNode["moveX"]);
                    real y = lexical_cast<real>(transformNode["moveY"]);
                    real z = lexical_cast<real>(transformNode["moveZ"]);
                    LOG_DEBUG("Moving body: [" << x << "; " << y << "; " << z << "]");
                    localScene.transfer(x, y, z);
                } 
                if ( transformType == "scale" )
                {
                    real x0 = lexical_cast<real>(transformNode["x0"]);
                    real y0 = lexical_cast<real>(transformNode["y0"]);
                    real z0 = lexical_cast<real>(transformNode["z0"]);
                    real scaleX = lexical_cast<real>(transformNode["scaleX"]);
                    real scaleY = lexical_cast<real>(transformNode["scaleY"]);
                    real scaleZ = lexical_cast<real>(transformNode["scaleZ"]);
                    LOG_DEBUG("Scaling body: [" << x0 << "; " << scaleX << "; " 
                                        << y0 << "; " << scaleY << "; " << z0 << "; " << scaleZ << "]");
                    localScene.scale(x0, y0, z0, scaleX, scaleY, scaleZ);
                }
            }
            LOG_DEBUG("Mesh preloaded. Mesh size: " << localScene << " Number of nodes: " << numberOfNodes);

            engine.getDispatcher()->addBodyOutline(id, localScene);
            engine.getDispatcher()->addBodySlicingDirection(id, slicingDirection);
            engine.getDispatcher()->addBodyNodesNumber(id, numberOfNodes);

            if( isinf(globalScene.maxX) )
            {
                globalScene = localScene;
            }
            else
            {
                for( int k = 0; k < 3; k++ )
                {
                    if( globalScene.min_coords[k] > localScene.min_coords[k] )
                        globalScene.min_coords[k] = localScene.min_coords[k];
                    if( globalScene.max_coords[k] < localScene.max_coords[k] )
                        globalScene.max_coords[k] = localScene.max_coords[k];
                }
            }
        }

        // add body to scene
        engine.addBody(body);
    }

    engine.setScene(globalScene);
    LOG_DEBUG("Total scene: " << engine.getScene());

    // run dispatcher
    engine.getDispatcher()->prepare(engine.getNumberOfWorkers(), &globalScene);
    engine.getDataBus()->syncOutlines();
    for( int i = 0; i < engine.getNumberOfWorkers(); i++)
    {
        LOG_DEBUG("Area scheduled for worker " << i << ": " << *(engine.getDispatcher()->getOutline(i)));
    }

    // read meshes for all bodies
    for(auto& bodyNode: bodyNodes)
    {
        string id = bodyNode.getAttributes()["id"];
        LOG_DEBUG("Loading meshes for body '" << id << "'");
        // get body instance
        Body* body = engine.getBodyById(id);

        // FIXME - WA - we need this to determine isMine() correctly for moved points
        real dX = 0;
        real dY = 0;
        real dZ = 0;
        NodeList tmpTransformNodes = bodyNode.getChildrenByName("transform");
        for(auto& transformNode: tmpTransformNodes)
        {
            string transformType = transformNode["type"];
            if ( transformType == "translate" )
            {
                dX += lexical_cast<real>(transformNode["moveX"]);
                dY += lexical_cast<real>(transformNode["moveY"]);
                dZ += lexical_cast<real>(transformNode["moveZ"]);
            }
            if ( transformType == "scale" )
            {
                //real x0 = lexical_cast<real>(transformNode["x0"]);
                //real y0 = lexical_cast<real>(transformNode["y0"]);
                //real z0 = lexical_cast<real>(transformNode["z0"]);
                //real scaleX = lexical_cast<real>(transformNode["scaleX"]);
                //real scaleY = lexical_cast<real>(transformNode["scaleY"]);
                //real scaleZ = lexical_cast<real>(transformNode["scaleZ"]);
            }
        }
        engine.getDispatcher()->setTransferVector(dX, dY, dZ, id);

        // load meshes
        NodeList meshNodes = bodyNode.getChildrenByName("mesh");
        for(auto& meshNode: meshNodes)
        {
            LOG_INFO("Loading mesh for body '" << id << "'");

            string type = meshNode["type"];

            Mesh* mesh = nullptr;

            if (type == Geo2MeshLoader::MESH_TYPE)
                mesh = Geo2MeshLoader::getInstance().load(meshNode, body);
            else if (type == Msh2MeshLoader::MESH_TYPE)
                mesh = Msh2MeshLoader::getInstance().load(meshNode, body);
            else if (type == Ani3D2MeshLoader::MESH_TYPE)
                mesh = Ani3D2MeshLoader::getInstance().load(meshNode, body);
            else if (type == Vtu2MeshLoader::MESH_TYPE)
                mesh = Vtu2MeshLoader::getInstance().load(meshNode, body);
            else if (type == Vtu2MeshZoneLoader::MESH_TYPE)
                mesh = Vtu2MeshZoneLoader::getInstance().load(meshNode, body);
            else if (type == BasicCubicMeshLoader::MESH_TYPE)
                mesh = BasicCubicMeshLoader::getInstance().load(meshNode, body);
            else if (type == RectangularCutCubicMeshLoader::MESH_TYPE)
                mesh = RectangularCutCubicMeshLoader::getInstance().load(meshNode, body);
            else if (type == MarkeredMeshGeoLoader::MESH_TYPE)
                mesh = MarkeredMeshGeoLoader::getInstance().load(meshNode, body);            
	    LOG_INFO("Loaded mesh for body '" << id << "', started attaching to body");
            // attach mesh to body
            body->attachMesh(mesh);
            mesh->setBodyNum( engine.getBodyNum(id) );
            LOG_INFO("Mesh '" << mesh->getId() << "' of type '" <<  type << "' created. "
                        << "Number of nodes: " << mesh->getNodesNumber() << ".");
        }

        // transform meshes
        NodeList transformNodes = bodyNode.getChildrenByName("transform");
        for(auto& transformNode: transformNodes)
        {
            string transformType = transformNode["type"];
            if( transformType == "translate" )
            {
                real x = lexical_cast<real>(transformNode["moveX"]);
                real y = lexical_cast<real>(transformNode["moveY"]);
                real z = lexical_cast<real>(transformNode["moveZ"]);
                LOG_DEBUG("Moving body: [" << x << "; " << y << "; " << z << "]");
                body->getMeshes()->transfer(x, y, z);
            }
            if ( transformType == "scale" )
            {
                real x0 = lexical_cast<real>(transformNode["x0"]);
                real y0 = lexical_cast<real>(transformNode["y0"]);
                real z0 = lexical_cast<real>(transformNode["z0"]);
                real scaleX = lexical_cast<real>(transformNode["scaleX"]);
                real scaleY = lexical_cast<real>(transformNode["scaleY"]);
                real scaleZ = lexical_cast<real>(transformNode["scaleZ"]);
                LOG_DEBUG("Scaling body: [" << x0 << "; " << scaleX << "; " 
                                << y0 << "; " << scaleY << "; " << z0 << "; " << scaleZ << "]");
                body->getMeshes()->scale(x0, y0, z0, scaleX, scaleY, scaleZ);
            }
        }

        // FIXME - Part of the WA above
        if( engine.getNumberOfWorkers() != 1 )
            engine.getDispatcher()->setTransferVector(/*-dX, -dY, -dZ,*/0, 0, 0, id);

        // set material properties
        NodeList matNodes = bodyNode.getChildrenByName("material");
        if (matNodes.size() < 1)
            THROW_INVALID_INPUT("Material not set");
        for(auto& matNode: matNodes)
        {
            string id = matNode["id"];
            // FIXME this code seems to be dead
            //Material* mat = engine.getMaterial(id);
            Mesh* mesh = body->getMeshes();

            NodeList areaNodes = matNode.getChildrenByName("area");
            int matId = engine.getMaterialIndex(id);
            usedMaterialsIds.push_back(matId);
            
            if (areaNodes.size() == 0)
            {
                mesh->setRheology( matId );
            }
            else if (areaNodes.size() == 1)
            {
                Area* matArea = readArea(areaNodes.front());
                if(matArea == NULL)
                    THROW_INVALID_INPUT("Can not read area");
                mesh->setRheology( matId, matArea );
            }
            else
            {
                THROW_INVALID_INPUT("Only one or zero area elements are allowed for material");
            }
        }
        LOG_DEBUG("Body '" << id << "' loaded");
    }

    NodeList initialStateNodes = rootNode.xpath("/task/initialState" + (initialStateGroup == "" ? "" : "[@group=\"" + initialStateGroup + "\"]"));
    if (initialStateGroup != "" && initialStateNodes.size() == 0)
        THROW_INVALID_ARG("Initial state group not found");
    for(auto& initialStateNode: initialStateNodes)
    {
        NodeList areaNodes = initialStateNode.getChildrenByName("area");
        NodeList valuesNodes = initialStateNode.getChildrenByName("values");
        NodeList pWaveNodes = initialStateNode.getChildrenByName("pWave");
        if (areaNodes.size() == 0)
            THROW_INVALID_INPUT("Area element should be provided for initial state");
        if (valuesNodes.size() > 1)
            THROW_INVALID_INPUT("Only one values element allowed for initial state");
        if (pWaveNodes.size() > 1)
            THROW_INVALID_INPUT("Only one pWave element allowed for initial state");
        if ((valuesNodes.size() == 1 && pWaveNodes.size() == 1) || (valuesNodes.size() == 0 && pWaveNodes.size() == 0))
            THROW_INVALID_INPUT("You have to provide initial state by using exactly one tag of allowed ones: values, pWave");;

        auto useValues = valuesNodes.size() == 1;
        real values[9];

        std::function<void(CalcNode&)> setter;

        if (useValues)
        {
            xml::Node valuesNode = valuesNodes.front();

            vector<string> names = {"vx", "vy", "vz", "sxx", "sxy", "sxz", "syy", "syz", "szz"};

            int i = 0;
            for (auto value_name: names)
            {
                string v = valuesNode.getAttributes()[value_name];
                values[i++] = v.empty() ? 0.0 : lexical_cast<real>(v);

            }
            
            LOG_DEBUG("Initial state values: "
                            << values[0] << " " << values[1] << " " << values[2] << " "
                            << values[3] << " " << values[4] << " " << values[5] << " "
                            << values[6] << " " << values[7] << " " << values[8] );
            
        }
        else
        {
            xml::Node pWaveNode = pWaveNodes.front();

            auto attrs = pWaveNode.getAttributes();

            auto direction = attrs["direction"];
            if (direction.empty())
                THROW_INVALID_INPUT("P-wave direction is not specified");

            vector<string> _direction;
            split(_direction, direction, is_any_of(";"));

            if (_direction.size() != 3)
                THROW_INVALID_INPUT("Invalid P-wave direction specified");

            auto dx = lexical_cast<real>(_direction[0]);
            auto dy = lexical_cast<real>(_direction[1]);
            auto dz = lexical_cast<real>(_direction[2]);

            Vector3 dir({dx, dy, dz});

            if (dx == 0.0 && dy == 0.0 && dz == 0.0)
                THROW_INVALID_INPUT("Invalid P-wave direction specified");

            auto scale = attrs["amplitudeScale"];
            if (scale.empty())
                THROW_INVALID_INPUT("P-wave amplitude scale is not specified");

            auto amplitudeScale = lexical_cast<real>(scale);
            if (amplitudeScale <= 0.0)
                THROW_INVALID_INPUT("P-wave amplitude must be positive");

            auto type = attrs["type"];
            if (type.empty())
                THROW_INVALID_INPUT("P-wave type is not specified");
            if (type != "compression" && type != "rarefaction")
                THROW_INVALID_INPUT("Invalid P-wave type specified");
            auto compression = type == "compression";

            setter = [=](CalcNode& node)
            {
                setIsotropicElasticPWave(node, dir, amplitudeScale, compression);
            };

        }
        for(auto& areaNode: areaNodes)
        {
            Area* stateArea = readArea(areaNode);
            if(stateArea == NULL)
                THROW_INVALID_INPUT("Can not read area");

            for( int i = 0; i < engine.getNumberOfBodies(); i++ )
            {
                if (useValues)
                   engine.getBody(i)->setInitialState(stateArea, values);
                else
                   engine.getBody(i)->setInitialState(stateArea, setter);
                engine.getBody(i)->getMeshes()->processStressState();
            }
        }
    }
    
    NodeList borderConditionNodes = rootNode.xpath("/task/borderCondition");
    for(auto& borderConditionNode: borderConditionNodes)
    {
        string calculator = borderConditionNode["calculator"];
        if( engine.getBorderCalculator(calculator) == NULL )
        {
            THROW_INVALID_INPUT("Unknown border calculator requested: " + calculator);
        }
        
        // FIXME_ASAP: calculators became stateful
        engine.getBorderCalculator(calculator)->setParameters( borderConditionNode );
        
        float startTime = lexical_cast<real>(borderConditionNode.getAttributeByName("startTime", "-1"));
        float duration = lexical_cast<real>(borderConditionNode.getAttributeByName("duration", "-1"));
        
        unsigned int conditionId = engine.addBorderCondition(
                new BorderCondition(NULL, new StepPulseForm(startTime, duration), engine.getBorderCalculator(calculator) ) 
        );
        LOG_INFO("Border condition created with calculator: " + calculator);
        
        NodeList areaNodes = borderConditionNode.getChildrenByName("area");
        if (areaNodes.size() == 0)
            THROW_INVALID_INPUT("Area should be specified for border condition");
        
        for(auto& areaNode: areaNodes)
        {
            Area* conditionArea = readArea(areaNode);
            if(conditionArea == NULL)
                THROW_INVALID_INPUT("Can not read area");

            for( int i = 0; i < engine.getNumberOfBodies(); i++ )
            {
                engine.getBody(i)->setBorderCondition(conditionArea, conditionId);
            }
        }
    }
    
    NodeList contactConditionNodes = rootNode.xpath("/task/contactCondition");
    for(auto& contactConditionNode: contactConditionNodes)
    {
        string calculator = contactConditionNode["calculator"];
        if( engine.getContactCalculator(calculator) == NULL )
        {
            THROW_INVALID_INPUT("Unknown border calculator requested: " + calculator);
        }
        
        float startTime = lexical_cast<real>(contactConditionNode.getAttributeByName("startTime", "-1"));
        float duration = lexical_cast<real>(contactConditionNode.getAttributeByName("duration", "-1"));
        
        unsigned int conditionId = engine.addContactCondition(
                new ContactCondition(NULL, new StepPulseForm(startTime, duration), engine.getContactCalculator(calculator) ) 
        );
        if (calculator == "AdhesionContactDestroyCalculator")
        {
            real adhesionThreshold = lexical_cast<real>(contactConditionNode["adhesionThreshold"]);
            engine.getContactCondition(conditionId)->setConditionParam(adhesionThreshold);
        }
        LOG_INFO("Contact condition created with calculator: " + calculator);
        
        NodeList areaNodes = contactConditionNode.getChildrenByName("area");
        if (areaNodes.size() == 0)
            THROW_INVALID_INPUT("Area should be specified for contact condition");
        
        for(auto& areaNode: areaNodes)
        {
            Area* conditionArea = readArea(areaNode);
            if(conditionArea == NULL)
                THROW_INVALID_INPUT("Can not read area");

            for( int i = 0; i < engine.getNumberOfBodies(); i++ )
            {
                engine.getBody(i)->setContactCondition(conditionArea, conditionId);
            }
        }
    }

    // create rheology matrixes
    vector<RheologyMatrixPtr> matrices;
    for (int i = 0; i < engine.getNumberOfMaterials(); i++)
    {
        MaterialPtr material = engine.getMaterial(i);
        
        bool materialUsedInTask = (std::find(usedMaterialsIds.begin(), usedMaterialsIds.end(), i) != usedMaterialsIds.end());
        
        auto props = material->getPlasticityProperties();
        bool plasticityPropsPresent = ( (props[plasticityType].size() != 0) 
                                        || (plasticityType == PLASTICITY_TYPE_NONE) );
        
        SetterPtr setter;
        DecomposerPtr decomposer;
        CorrectorPtr corrector;
        RheologyMatrixPtr matrix;

        if (material->isIsotropic())
        {
            if(materialUsedInTask)
            {
                LOG_INFO("Using \"" << plasticityType << "\" plasticity model " 
                        << "and \""  + failureMode + "\" failure mode "
                        << "for isotropic material \"" << material->getName() << "\".");
                if( !plasticityPropsPresent )
                    THROW_UNSUPPORTED("Required plasticity properties were not found.");
            }
            
            if (plasticityType == PLASTICITY_TYPE_NONE)
            {
                corrector = nullptr;
                setter = makeSetterPtr<IsotropicRheologyMatrixSetter>();
                decomposer = makeDecomposerPtr<IsotropicRheologyMatrixDecomposer>();
            }
            else if (plasticityType == PLASTICITY_TYPE_PRANDTL_RAUSS)
            {
                corrector = nullptr;
                setter = makeSetterPtr<PrandtlRaussPlasticityRheologyMatrixSetter>();
                if (matrixDecompositionImplementation == "numerical")
                    decomposer = makeDecomposerPtr<NumericalRheologyMatrixDecomposer>();
                else
                    decomposer = makeDecomposerPtr<AnalyticalRheologyMatrixDecomposer>();
            }
            else if (plasticityType == PLASTICITY_TYPE_PRANDTL_RAUSS_CORRECTOR)
            {
                corrector = makeCorrectorPtr<IdealPlasticFlowCorrector>();
                setter = makeSetterPtr<IsotropicRheologyMatrixSetter>();
                decomposer = makeDecomposerPtr<IsotropicRheologyMatrixDecomposer>();
            }
            else
            {
                THROW_UNSUPPORTED("Plasticity type\"" + plasticityType + "\" is not supported.");
            }
            
            if (failureMode == FAILURE_MODE_DISCRETE)
            {
                corrector = nullptr;
                setter = makeSetterPtr<IsotropicRheologyMatrixSetter>();
                decomposer = makeDecomposerPtr<IsotropicRheologyMatrixDecomposer>();
            }
            else if (failureMode == FAILURE_MODE_CONTINUAL)
            {
                corrector = nullptr;
                setter = makeSetterPtr<IsotropicDamagedRheologyMatrixSetter>();
                decomposer = makeDecomposerPtr<IsotropicRheologyMatrixDecomposer>();
            }
            else
            {
                THROW_UNSUPPORTED("Failure mode \"" + failureMode + "\" is not supported.");
            }
        } else 
        {
            if(materialUsedInTask)
            {
                LOG_INFO("Using \"" << plasticityType << "\" plasticity model for anisotropic material \"" << material->getName() << "\".");
                if (plasticityType != PLASTICITY_TYPE_NONE)
                    LOG_WARN("Plasticity is not supported for anisotropic materials, using elastic instead.");
            }
            
            if (failureMode == FAILURE_MODE_DISCRETE)
            {
                corrector = nullptr;
                setter = makeSetterPtr<AnisotropicRheologyMatrixSetter>();
            }
            else if (failureMode == FAILURE_MODE_CONTINUAL)
            {
                corrector = nullptr;
                setter = makeSetterPtr<AnisotropicDamagedRheologyMatrixSetter>();
            }
            else
            {
                THROW_UNSUPPORTED("Failure mode \"" + failureMode + "\" is not supported.");
            }          
            
            if( matrixDecompositionImplementation == "numerical" )
                decomposer = makeDecomposerPtr<NumericalRheologyMatrixDecomposer>();
            else
                decomposer = makeDecomposerPtr<AnalyticalRheologyMatrixDecomposer>();
        }

        matrices.push_back(makeRheologyMatrixPtr(material, setter, decomposer, corrector));
    }

    engine.setRheologyMatrices([&matrices](const CalcNode& node) -> RheologyMatrixPtr
        {
            return matrices[node.getMaterialId()];
        }
    );

    LOG_DEBUG("Running plugin-specific initializations");

    for (auto plugin: engine.getPlugins())
        plugin->parseTask(doc);

    LOG_DEBUG("Scene loaded");
}
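
For orientation, a skeleton of the task file this parser expects, reconstructed only from the xpath queries and attribute names used above; every value, and any element or attribute not mentioned in the code, is made up or elided for illustration:

// Skeleton of the expected task file, kept as a C++ raw string for illustration.
const char* kTaskSkeleton = R"(
<task numberOfSnaps="10" stepsPerSnap="5">
    <system>
        <defaultRheologyCalculator type="..."/>
        <timeStep multiplier="1.0"/>
    </system>
    <materials>
        <material id="steel" ... />
    </materials>
    <bodies>
        <body id="cube">
            <mesh type="..." ... />
            <transform type="translate" moveX="0" moveY="0" moveZ="0"/>
            <material id="steel"/>
        </body>
    </bodies>
    <initialState>
        <area ... />
        <values vx="0" vy="0" vz="0"/>
    </initialState>
</task>
)";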
Example #9
void DemandCalculator::CalcDemand(LinkGraphJob &job, Tscaler scaler)
{
	NodeList supplies;
	NodeList demands;
	uint num_supplies = 0;
	uint num_demands = 0;

	for (NodeID node = 0; node < job.Size(); node++) {
		scaler.AddNode(job[node]);
		if (job[node].Supply() > 0) {
			supplies.push_back(node);
			num_supplies++;
		}
		if (job[node].Demand() > 0) {
			demands.push_back(node);
			num_demands++;
		}
	}

	if (num_supplies == 0 || num_demands == 0) return;

	/* Mean acceptance attributed to each node. If the distribution is
	 * symmetric this is relative to remote supply, otherwise it is
	 * relative to remote demand. */
	scaler.SetDemandPerNode(num_demands);
	uint chance = 0;

	while (!supplies.empty() && !demands.empty()) {
		NodeID from_id = supplies.front();
		supplies.pop_front();

		for (uint i = 0; i < num_demands; ++i) {
			assert(!demands.empty());
			NodeID to_id = demands.front();
			demands.pop_front();
			if (from_id == to_id) {
				/* Only one node with supply and demand left */
				if (demands.empty() && supplies.empty()) return;

				demands.push_back(to_id);
				continue;
			}

			int32 supply = scaler.EffectiveSupply(job[from_id], job[to_id]);
			assert(supply > 0);

			/* Scale the distance by mod_dist around max_distance */
			int32 distance = this->max_distance - (this->max_distance -
					(int32)job[from_id][to_id].Distance()) * this->mod_dist / 100;

			/* Scale the accuracy by distance around accuracy / 2 */
			int32 divisor = this->accuracy * (this->mod_dist - 50) / 100 +
					this->accuracy * distance / this->max_distance + 1;

			assert(divisor > 0);

			uint demand_forw = 0;
			if (divisor <= supply) {
				/* At first only distribute demand if
				 * effective supply / accuracy divisor >= 1
				 * Others are too small or too far away to be considered. */
				demand_forw = supply / divisor;
			} else if (++chance > this->accuracy * num_demands * num_supplies) {
				/* After some trying, if there is still supply left, distribute
				 * demand also to other nodes. */
				demand_forw = 1;
			}

			demand_forw = min(demand_forw, job[from_id].UndeliveredSupply());

			scaler.SetDemands(job, from_id, to_id, demand_forw);

			if (scaler.HasDemandLeft(job[to_id])) {
				demands.push_back(to_id);
			} else {
				num_demands--;
			}

			if (job[from_id].UndeliveredSupply() == 0) break;
		}

		if (job[from_id].UndeliveredSupply() != 0) {
			supplies.push_back(from_id);
		} else {
			num_supplies--;
		}
	}
}
Example #10
NodeList* Pathfinder::PathBetweenPoints(int x1, int y1, int x2, int y2) {

	// Set up all the data structures we need, lots o' stuff
	NodeList Q;
	PreviousNodeMap prev;
	PopulateListWithNodes(Q);
	Node* source = m_nodeMap[x1][y1];
	Node* dest = m_nodeMap[x2][y2];
	// Make sure source and dest are in Q
	if(find(Q.begin(), Q.end(), source) == Q.end()) {
		Q.push_back(source);
	}
	if(find(Q.begin(), Q.end(), dest) == Q.end()) {
		Q.push_back(dest);
	}
	ResetNodes(Q, x2, y2);
	source->SetDistance(0);

	while(Q.size() > 0) {
		Q.sort(NodesByScore);
		Node* u = Q.front();
		if(u == dest) {
			// found our node, break!
			break;
		}
		if(u->GetDistance() == NODE_INFINITY) {
			// In this case, no valid path from point 1 to point 2
			return NULL;
		}

		// Remove it from the unvisited queue
		Q.remove(u);

		// Update its neighbors
		int x = u->GetX();
		int y = u->GetY();

		if(x - 1 >= 0 && m_nodeMap[x-1][y]) {
			Node* toUpdate = m_nodeMap[x-1][y];
			if(u->GetDistance() + 1 < toUpdate->GetDistance()) {
				prev[toUpdate] = u;
				toUpdate->SetDistance(u->GetDistance() + 1);
			}
		}
		if(x + 1 < m_currentLevel->GetWidth() && m_nodeMap[x+1][y]) {
			Node* toUpdate = m_nodeMap[x+1][y];
			if(u->GetDistance() + 1 < toUpdate->GetDistance()) {
				prev[toUpdate] = u;
				toUpdate->SetDistance(u->GetDistance() + 1);
			}
		}
		if(y - 1 >= 0 && m_nodeMap[x][y-1]) {
			Node* toUpdate = m_nodeMap[x][y-1];
			if(u->GetDistance() + 1 < toUpdate->GetDistance()) {
				prev[toUpdate] = u;
				toUpdate->SetDistance(u->GetDistance() + 1);
			}
		}
		if(y + 1 < m_currentLevel->GetHeight() && m_nodeMap[x][y+1]) {
			Node* toUpdate = m_nodeMap[x][y+1];
			if(u->GetDistance() + 1 < toUpdate->GetDistance()) {
				prev[toUpdate] = u;
				toUpdate->SetDistance(u->GetDistance() + 1);
			}
		}
	}
	// Prep the list of path nodes to send back
	NodeList* toReturn = new NodeList();
	Node* next = prev[dest];
	toReturn->push_back(next);
	while(prev.find(next) != prev.end() && prev[next] != source) {
		next = prev[next];
		toReturn->push_back(next);
	}

	return toReturn;

}
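A short usage sketch for the method above, assuming NodeList is a std::list<Node*> (as the member calls suggest) and that pathfinder is a fully initialized Pathfinder. Note that the returned list is heap-allocated and owned by the caller, runs from the node adjacent to the destination back toward the source, and does not include the source or destination nodes themselves:

NodeList* path = pathfinder.PathBetweenPoints(2, 3, 10, 7);  // coordinates are arbitrary
if (path != NULL) {
	// Walk the path in reverse, i.e. from the source side toward the destination.
	for (NodeList::reverse_iterator it = path->rbegin(); it != path->rend(); ++it) {
		Node* step = *it;
		// ... move the actor to (step->GetX(), step->GetY()) ...
	}
	delete path;  // the caller owns the returned list
}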