Example no. 1
0
//***********************************************************************
// main function for outputting the graph
//***********************************************************************
void AnnotationGraphXmlDumper::dump(
    std::ostream& os,
    const AnnotationGraph* graph,
    const AnnotationData& annotationData) const
{
  DumpGraphVisitor vis(os, annotationData);

  os << "<annot-graph>" << std::endl;
//   Color color;
  boost::depth_first_search(*graph, boost::visitor(vis));
  os << "  <matchings>" << std::endl;
  std::map<StringsPoolIndex, std::multimap<AnnotationGraphVertex, AnnotationGraphVertex> >::const_iterator it, it_end;
  it = annotationData.matchings().begin();
  it_end = annotationData.matchings().end();
  for (; it != it_end; it++)
  {
    const std::multimap<AnnotationGraphVertex, AnnotationGraphVertex>& matching = (*it).second;
    os << "    <matching id=\"" << Misc::limastring2utf8stdstring(annotationData.annotationName((*it).first)) << "\">" << std::endl;
    std::multimap<AnnotationGraphVertex, AnnotationGraphVertex>::const_iterator sit, sit_end;
    sit = matching.begin(); sit_end = matching.end();
    for (; sit != sit_end; sit++)
    {
      os << "      <pair k=\""<<(*sit).first<<"\" v=\""<<(*sit).second<<"\"/>" << std::endl;
    }
    os << "    </matching>" << std::endl;
  }
  os << "  </matchings>" << std::endl;
  os << "</annot-graph>" << std::endl;
}
void
KML_Feature::build( const Config& conf, KMLContext& cx, osg::Node* working )
{
    KML_Object::build(conf, cx, working);

    // subclass feature is built; now add feature level data if available
    if ( working )
    {
        // parse the visibility to show/hide the item by default:
        if ( conf.hasValue("visibility") )
            working->setNodeMask( conf.value<bool>("visibility",true) == true ? ~0 : 0 );

        // parse a "LookAt" element (stores a viewpoint)
        AnnotationData* anno = getOrCreateAnnotationData(working);
        
        anno->setName( conf.value("name") );
        anno->setDescription( conf.value("description") );

        const Config& lookat = conf.child("lookat");
        if ( !lookat.empty() )
        {
            Viewpoint vp(
                lookat.value<double>("longitude", 0.0),
                lookat.value<double>("latitude", 0.0),
                lookat.value<double>("altitude", 0.0),
                lookat.value<double>("heading", 0.0),
                -lookat.value<double>("tilt", 45.0),
                lookat.value<double>("range", 10000.0) );

            anno->setViewpoint( vp );
        }
    }
}
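The getOrCreateAnnotationData() helper called above is not reproduced in this example. A minimal sketch of what it plausibly does, assuming the AnnotationData is kept as the node's user data (the real osgEarth helper, and where it lives, may differ):

// Hypothetical reconstruction, for illustration only: reuse an AnnotationData
// already attached to the node as user data, or create and attach a new one.
static AnnotationData*
getOrCreateAnnotationData( osg::Node* node )
{
    AnnotationData* data = dynamic_cast<AnnotationData*>( node->getUserData() );
    if ( !data )
    {
        data = new AnnotationData();
        node->setUserData( data );
    }
    return data;
}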
    osg::Node* makePlaceNode(FilterContext&     context,
                             Feature*           feature, 
                             const Style&       style, 
                             NumericExpression& priorityExpr )
    {
        osg::Vec3d center = feature->getGeometry()->getBounds().center();

        AltitudeMode mode = ALTMODE_ABSOLUTE;        

        const AltitudeSymbol* alt = style.getSymbol<AltitudeSymbol>();
        if (alt &&
           (alt->clamping() == AltitudeSymbol::CLAMP_TO_TERRAIN || alt->clamping() == AltitudeSymbol::CLAMP_RELATIVE_TO_TERRAIN) &&
           alt->technique() == AltitudeSymbol::TECHNIQUE_SCENE)
        {
            mode = ALTMODE_RELATIVE;
        }                              

        GeoPoint point(feature->getSRS(), center.x(), center.y(), center.z(), mode);        

        PlaceNode* node = new PlaceNode(0L, point, style, context.getDBOptions());

        if ( !priorityExpr.empty() )
        {
            AnnotationData* data = new AnnotationData();
            data->setPriority( feature->eval(priorityExpr, &context) );
            node->setAnnotationData( data );
        }

        return node;
    }
Example no. 4
0
void
KML_Feature::build( xml_node<>* node, KMLContext& cx, osg::Node* working )
{
    KML_Object::build(node, cx, working);

    // subclass feature is built; now add feature level data if available
    if ( working )
    {
        // parse the visibility to show/hide the item by default:
        std::string visibility = getValue(node, "visibility");
        if ( !visibility.empty() )
            working->setNodeMask( as<int>(visibility, 1) == 1 ? ~0 : 0 );

        // parse a "LookAt" element (stores a viewpoint)
        AnnotationData* anno = getOrCreateAnnotationData(working);
        
        anno->setName( getValue(node, "name") );
        anno->setDescription( getValue(node, "description") );

        xml_node<>* lookat = node->first_node("lookat", 0, false);
        if ( lookat )
        {
            Viewpoint vp;

            vp.focalPoint() = GeoPoint(
                cx._srs.get(),
				as<double>(getValue(lookat, "longitude"), 0.0),
				as<double>(getValue(lookat, "latitude"), 0.0),
				as<double>(getValue(lookat, "altitude"), 0.0),
                ALTMODE_ABSOLUTE );

            vp.heading() =  as<double>(getValue(lookat, "heading"), 0.0);
            vp.pitch()   = -as<double>(getValue(lookat, "tilt"), 45.0);
            vp.range()   =  as<double>(getValue(lookat, "range"), 10000.0);

            anno->setViewpoint( vp );
        }

        xml_node<>* extdata = node->first_node("extendeddata", 0, false);
        if ( extdata )
        {
            xml_node<>* data = extdata->first_node("data", 0, false);
            if ( data )
            {
                for (xml_node<>* n = data->first_node(); n; n = n->next_sibling())
                {
                    working->setUserValue(getValue(n, "name"), getValue(n, "value"));
                }
            }
        }
    }
}
Example no. 5
0
TrackNode* createTrack(TrackNodeFieldSchema& schema, osg::Image* image, const std::string& name, MapNode* mapNode, const osg::Vec3d& center, double radius, double time, TrackSimVector& trackSims)
{
  TrackNode* track = new TrackNode(mapNode, GeoPoint(mapNode->getMapSRS(),center,ALTMODE_ABSOLUTE), image, schema);
  track->setFieldValue(TRACK_FIELD_NAME, name);

  AnnotationData* data = new AnnotationData();
  data->setName(name);
  data->setViewpoint(osgEarth::Viewpoint(center, 0.0, -90.0, 1e5));
  track->setAnnotationData( data );

  trackSims.push_back(new TrackSim(track, center, radius, time, mapNode));

  return track;
}
Example no. 6
0
/** Builds a bunch of tracks. */
void
createTrackNodes( MapNode* mapNode, osg::Group* parent, const TrackNodeFieldSchema& schema, TrackSims& sims )
{
    // load an icon to use:
    osg::ref_ptr<osg::Image> srcImage = osgDB::readImageFile( ICON_URL );
    osg::ref_ptr<osg::Image> image;
    ImageUtils::resizeImage( srcImage.get(), ICON_SIZE, ICON_SIZE, image );

    // make some tracks, choosing a random simulation for each.
    Random prng;
    const SpatialReference* geoSRS = mapNode->getMapSRS()->getGeographicSRS();

    for( unsigned i=0; i<g_numTracks; ++i )
    {
        double lon0 = -180.0 + prng.next() * 360.0;
        double lat0 = -80.0 + prng.next() * 160.0;

        GeoPoint pos(geoSRS, lon0, lat0);

        TrackNode* track = new TrackNode(mapNode, pos, image, schema);

        track->setFieldValue( FIELD_NAME,     Stringify() << "Track:" << i );
        track->setFieldValue( FIELD_POSITION, Stringify() << s_format(pos) );
        track->setFieldValue( FIELD_NUMBER,   Stringify() << (1 + prng.next(9)) );

        // add a priority
        AnnotationData* data = new AnnotationData();
        data->setPriority( float(i) );
        track->setAnnotationData( data );

        Decluttering::setEnabled(track->getOrCreateStateSet(), true);

        parent->addChild( track );

        // add a simulator for this guy
        double lon1 = -180.0 + prng.next() * 360.0;
        double lat1 = -80.0 + prng.next() * 160.0;
        TrackSim* sim = new TrackSim();
        sim->_track = track;        
        sim->_startLat = lat0; sim->_startLon = lon0;
        sim->_endLat = lat1; sim->_endLon = lon1;
        sims.push_back( sim );
    }
}
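The TrackSim record filled in at the end of the loop above is not defined in this example. A minimal sketch of its shape, inferred only from the fields assigned here; the base class, the container typedef, and any update logic that moves the track between the two endpoints are assumptions:

// Sketch inferred from usage above; illustrative only.
struct TrackSim : public osg::Referenced
{
    osg::ref_ptr<TrackNode> _track;   // the node being animated
    double _startLat, _startLon;      // simulation start position (degrees)
    double _endLat,   _endLon;        // simulation end position (degrees)
};

typedef std::vector< osg::ref_ptr<TrackSim> > TrackSims;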
    osg::Node* makePlaceNode(const FilterContext& context,
                             const Feature*       feature, 
                             const Style&         style, 
                             NumericExpression&   priorityExpr )
    {
        osg::Vec3d center = feature->getGeometry()->getBounds().center();
        GeoPoint point(feature->getSRS(), center.x(), center.y());

        PlaceNode* placeNode = new PlaceNode(0L, point, style, context.getDBOptions());

        if ( !priorityExpr.empty() )
        {
            AnnotationData* data = new AnnotationData();
            data->setPriority( feature->eval(priorityExpr, &context) );
            placeNode->setAnnotationData( data );
        }

        return placeNode;
    }
Example no. 8
0
bool EntityGroupTransition::
compare(const LinguisticAnalysisStructure::AnalysisGraph& graph,
        const LinguisticGraphVertex& v,
        AnalysisContent& analysis,
        const LinguisticAnalysisStructure::Token* /*token*/,
        const LinguisticAnalysisStructure::MorphoSyntacticData* /*data*/) const
{
  // should compare to vertex ?
  AnnotationData* annotationData = static_cast< AnnotationData* >(analysis.getData("AnnotationData"));
  if (annotationData==0) {
    AULOGINIT;
    LDEBUG << "EntityGroupTransition::compare: no annotation graph available !";
    return false;
  }

  // find annotationGraphVertex matching the vertex of the current graph
  std::set<AnnotationGraphVertex> matches = annotationData->matches(graph.getGraphId(), v, "annot");
  if (matches.empty())
  {
    AULOGINIT;
    LDEBUG << "annotation ("<<graph.getGraphId()<<", "<<v<<", \"annot\") available";
    return false;
  }
  AnnotationGraphVertex annotVertex = *(matches.begin());

  if (!annotationData->hasAnnotation(annotVertex, m_entityAnnotation))
  {
    AULOGINIT;
    LDEBUG << "EntityGroupTransition::compare: No " << m_entityAnnotation << " annotation available on " << v;
    return false;
  }
  
  const SpecificEntityAnnotation* se =
    annotationData->annotation(annotVertex, m_entityAnnotation).
    pointerValue<SpecificEntityAnnotation>();
  Common::MediaticData::EntityType type = se->getType();
  AULOGINIT;
  LDEBUG << "EntityGroupTransition::compare: type = " << type << ", groupId = " << type.getGroupId();
  LDEBUG << "EntityGroupTransition::compare: m_entityGroupId = " << m_entityGroupId;
  LDEBUG << "EntityGroupTransition::compare: tests m_entityGroupId == type.getGroupId() = " << (m_entityGroupId == type.getGroupId());
  return( m_entityGroupId == type.getGroupId() );
}
    osg::Node* makeLabelNode(const FilterContext& context, 
                             const Feature*       feature, 
                             const std::string&   value, 
                             const TextSymbol*    text, 
                             NumericExpression&   priorityExpr )
    {
        LabelNode* labelNode = new LabelNode(
            context.getSession()->getMapInfo().getProfile()->getSRS(),
            GeoPoint(feature->getSRS(), feature->getGeometry()->getBounds().center()),
            value,
            text );

        if ( text->priority().isSet() )
        {
            AnnotationData* data = new AnnotationData();
            data->setPriority( feature->eval(priorityExpr, &context) );
            labelNode->setAnnotationData( data );
        }

        return labelNode;
    }
Example no. 10
0
LimaStatusCode SpecificEntitiesXmlLogger::process(
  AnalysisContent& analysis) const
{
  SELOGINIT;
  LDEBUG << "SpecificEntitiesXmlLogger::process";
  TimeUtils::updateCurrentTime();

  AnnotationData* annotationData = static_cast< AnnotationData* >(analysis.getData("AnnotationData"));
  if (annotationData == 0) {
    SELOGINIT;
    LERROR << "no annotationData ! abort";
    return MISSING_DATA;
  }
  
  
  LinguisticAnalysisStructure::AnalysisGraph* graphp = static_cast<LinguisticAnalysisStructure::AnalysisGraph*>(analysis.getData(m_graph));
  if (graphp == 0) {
    SELOGINIT;
    LERROR << "no graph "<< m_graph <<" ! abort";
    return MISSING_DATA;
  }
  const LinguisticAnalysisStructure::AnalysisGraph& graph = *graphp;
  LinguisticGraph* lingGraph = const_cast<LinguisticGraph*>(graph.getGraph());
  VertexTokenPropertyMap tokenMap = get(vertex_token, *lingGraph);
  
  LinguisticMetaData* metadata=static_cast<LinguisticMetaData*>(analysis.getData("LinguisticMetaData"));
  if (metadata == 0) {
      SELOGINIT;
      LERROR << "no LinguisticMetaData ! abort";
      return MISSING_DATA;
  }

  DumperStream* dstream=initialize(analysis);
  ostream& out=dstream->out();

  uint64_t offset(0);
  try {
    offset=atoi(metadata->getMetaData("StartOffset").c_str());
  }
  catch (LinguisticProcessingException& ) {
    // do nothing: not set in analyzeText (only in analyzeXmlDocuments)
  }

  uint64_t offsetIndexingNode(0);
  try {
    offsetIndexingNode=atoi(metadata->getMetaData("StartOffsetIndexingNode").c_str());
  }
  catch (LinguisticProcessingException& ) {
    // do nothing: not set in analyzeText (only in analyzeXmlDocuments)
  }

  std::string docId("");
  try {
    docId=metadata->getMetaData("DocId");
  }
  catch (LinguisticProcessingException& ) {
    // do nothing: not set in analyzeText (only in analyzeXmlDocuments)
  }

  if (m_compactFormat) {
    out << "<entities docid=\"" << docId
    << "\" offsetNode=\"" << offsetIndexingNode 
    << "\" offset=\"" << offset
    << "\">" << endl;
  }
  else {
    out << "<specific_entities>" << endl;
  }
//   SELOGINIT;

  if (m_followGraph) {
    // instead of looking at all annotations, follow the graph (in the
    // morphological graph, some vertices are not related to the main graph:
    // idiomatic expression parts and named entity parts)
    // -> this will not include nested entities

    AnalysisGraph* tokenList=static_cast<AnalysisGraph*>(analysis.getData(m_graph));
    if (tokenList==0) {
      LERROR << "graph " << m_graph << " has not been produced: check pipeline";
      return MISSING_DATA;
    }
    LinguisticGraph* graph=tokenList->getGraph();
    //const FsaStringsPool& sp=Common::MediaticData::MediaticData::single().stringsPool(m_language);
    
    std::queue<LinguisticGraphVertex> toVisit;
    std::set<LinguisticGraphVertex> visited;
    toVisit.push(tokenList->firstVertex());
    
    LinguisticGraphOutEdgeIt outItr,outItrEnd;
    while (!toVisit.empty()) {
      LinguisticGraphVertex v=toVisit.front();
      toVisit.pop();
      if (v == tokenList->lastVertex()) {
        continue;
      }
      
      for (boost::tie(outItr,outItrEnd)=out_edges(v,*graph); outItr!=outItrEnd; outItr++) 
      {
        LinguisticGraphVertex next=target(*outItr,*graph);
        if (visited.find(next)==visited.end())
        {
          visited.insert(next);
          toVisit.push(next);
        }
      }
      const SpecificEntityAnnotation* annot=getSpecificEntityAnnotation(v,annotationData);
      if (annot != 0) {
        outputEntity(out,v,annot,tokenMap,offset);
      }
    }
  }
  else {
    // take all annotations
    AnnotationGraphVertexIt itv, itv_end;
    boost::tie(itv, itv_end) = vertices(annotationData->getGraph());
    for (; itv != itv_end; itv++)
    {
      //     LDEBUG << "SpecificEntitiesXmlLogger on annotation vertex " << *itv;
      if (annotationData->hasAnnotation(*itv,Common::Misc::utf8stdstring2limastring("SpecificEntity")))
      {
        //       LDEBUG << "    it has SpecificEntityAnnotation";
        const SpecificEntityAnnotation* annot = 0;
        try
        {
          annot = annotationData->annotation(*itv,Common::Misc::utf8stdstring2limastring("SpecificEntity"))
          .pointerValue<SpecificEntityAnnotation>();
        }
        catch (const boost::bad_any_cast& )
        {
          SELOGINIT;
          LERROR << "This annotation is not a SpecificEntity; SE not logged";
          continue;
        }
        
        // retrieve the id of the morphological vertex that was created
        LinguisticGraphVertex v;
        if (!annotationData->hasIntAnnotation(*itv,Common::Misc::utf8stdstring2limastring(m_graph)))
        {
          //         SELOGINIT;
          //         LDEBUG << *itv << " has no " << m_graph << " annotation. Skipping it.";
          continue;
        }
        v = annotationData->intAnnotation(*itv,Common::Misc::utf8stdstring2limastring(m_graph));
        outputEntity(out,v,annot,tokenMap,offset);
      }
    }
  }   
  
  //   LDEBUG << "    all vertices done";
  if (m_compactFormat) {
    out << "</entities>" << endl;
  }
  else {
    out << "</specific_entities>" << endl;
  }
  delete dstream;
  TimeUtils::logElapsedTime("SpecificEntitiesXmlLogger");
  return SUCCESS_ID;
  
}
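The getSpecificEntityAnnotation() helper used in the graph-following branch above is not shown in this example. A plausible sketch, mirroring the lookup pattern of the EntityGroupTransition example; it is written here as a const member so it can read m_graph, but that placement, and everything not taken from the surrounding code, is an assumption:

// Map the analysis vertex to its annotation vertices, then return the
// "SpecificEntity" annotation of the first one that carries it, 0 otherwise.
const SpecificEntityAnnotation* SpecificEntitiesXmlLogger::
getSpecificEntityAnnotation(LinguisticGraphVertex v,
                            AnnotationData* annotationData) const
{
  std::set< AnnotationGraphVertex > matches = annotationData->matches(m_graph, v, "annot");
  for (std::set< AnnotationGraphVertex >::const_iterator it = matches.begin();
       it != matches.end(); it++)
  {
    if (annotationData->hasAnnotation(*it, Common::Misc::utf8stdstring2limastring("SpecificEntity")))
    {
      return annotationData->annotation(*it, Common::Misc::utf8stdstring2limastring("SpecificEntity"))
        .pointerValue<SpecificEntityAnnotation>();
    }
  }
  return 0;
}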
// main routine
int main(int argc, char* argv[]) {

    //%%%%%%%%%%%%%%%%%%%%%%%% init %%%%%%%%%%%%%%%%%%%%%%%%

    // read arguments
    if(argc<3) {
        cerr << "Usage: ComputeFeatures config.txt override(no(0)/yes(1))" << endl;
        exit(-1);
    }

    // read config file
    StructParam param;
    if(!param.loadConfigFeature(argv[1])) {
        cerr << "Could not parse " << argv[1] << endl;
        exit(-1);
    }

    // read test/anno data (uses same data structure)
    AnnotationData TestD;
    TestD.loadAnnoFile(param.test_file.c_str());

    //if(atoi(argv[2])==2)
    //system(("rm " + param.feature_path + "/*.pgm").c_str());


    // detect hypotheses on all images
    for(unsigned int i=0; i<TestD.AnnoData.size(); ++i) {

        // read image
        Mat originImg = imread((param.image_path+"/"+TestD.AnnoData[i].image_name).c_str());
        if(originImg.empty()) {
            cerr << "Could not read image file " << param.image_path << "/" << TestD.AnnoData[i].image_name << endl;
            continue;
        }

        cout << system(("mkdir " + param.feature_path + "/" + TestD.AnnoData[i].image_name).c_str());

        // extract features
        for(unsigned int k=0; k<param.scales.size(); ++k) {

            Features Feat;
            string fname(param.feature_path+"/"+TestD.AnnoData[i].image_name+"/"+TestD.AnnoData[i].image_name);
            if( atoi(argv[2])==1 || !Feat.loadFeatures( fname, param.scales[k]) ) {

                Mat scaledImg;
                resize(originImg, scaledImg, Size(int(originImg.cols * param.scales[k] + 0.5), int(originImg.rows * param.scales[k] + 0.5)) );
                Feat.extractFeatureChannels(scaledImg);
                Feat.saveFeatures( fname, param.scales[k]);

#if 0
                // debug!!!!
                Features Feat2;
                namedWindow( "ShowF", CV_WINDOW_AUTOSIZE );
                imshow( "ShowF", Feat.Channels[0] );

                Feat2.loadFeatures( fname, param.scales[k]);

                namedWindow( "ShowF2", CV_WINDOW_AUTOSIZE );
                imshow( "ShowF2", Feat2.Channels[0] );

                cout << scaledImg.rows << " " << scaledImg.cols << " " << scaledImg.depth() << " " << scaledImg.channels() << " " << scaledImg.isContinuous() << endl;
                cout << Feat.Channels[0].rows << " " << Feat.Channels[0].cols << " " << Feat.Channels[0].depth() << " " << Feat.Channels[0].channels() << " " << Feat.Channels[0].isContinuous() << endl;
                cout << Feat2.Channels[0].rows << " " << Feat2.Channels[0].cols << " " << Feat2.Channels[0].depth() << " " << Feat2.Channels[0].channels() << " " << Feat.Channels[0].isContinuous() << endl;


                Mat diff(Size(scaledImg.cols,scaledImg.rows),CV_8UC1);
                cout << diff.rows << " " << diff.cols << " " << diff.depth() << " " << diff.channels() << " " << scaledImg.isContinuous() << endl;

                diff = Feat.Channels[0] - Feat2.Channels[0];

                namedWindow( "ShowDiff", CV_WINDOW_AUTOSIZE );
                imshow( "ShowDiff", diff );
                waitKey(0);
#endif
            }

        }

    }

    return 0;

}
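The Features helper driving the caching logic above is not part of this example. An interface sketch reconstructed from the calls made here; the member names come from the code above, while the signatures (in particular the scale parameter type) are assumptions:

// Sketch inferred from usage above; illustrative only.
struct Features
{
    std::vector<cv::Mat> Channels;   // per-channel feature maps; Channels[0] is shown in the debug block

    bool loadFeatures(const std::string& basename, float scale);   // read cached features; false if missing
    void extractFeatureChannels(const cv::Mat& img);               // compute the channels from an image
    void saveFeatures(const std::string& basename, float scale);   // write the cache back to disk
};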
Example no. 12
0
LimaStatusCode SemanticRelationsXmlLogger::
process(AnalysisContent& analysis) const
{
  TimeUtils::updateCurrentTime();
  
  SEMLOGINIT;
  LERROR << "SemanticRelationsXmlLogger" << LENDL;
    
  AnnotationData* annotationData = static_cast< AnnotationData* >(analysis.getData("AnnotationData"));
  
  const LinguisticAnalysisStructure::AnalysisGraph& graph = 
    *(static_cast<LinguisticAnalysisStructure::AnalysisGraph*>(analysis.getData(m_graph)));
  
  LinguisticGraph* lingGraph = const_cast<LinguisticGraph*>(graph.getGraph());
  VertexTokenPropertyMap tokenMap = get(vertex_token, *lingGraph);
  LinguisticMetaData* metadata=static_cast<LinguisticMetaData*>(analysis.getData("LinguisticMetaData"));
  if (metadata == 0) {
      SEMLOGINIT;
      LERROR << "no LinguisticMetaData ! abort" << LENDL;
      return MISSING_DATA;
  }

  ofstream out;
  if (!openLogFile(out,metadata->getMetaData("FileName"))) {
    SEMLOGINIT;
    LERROR << "Can't open log file " << LENDL;
    return UNKNOWN_ERROR;
  }

  uint64_t offset(0);
  try {
    offset=atoi(metadata->getMetaData("StartOffset").c_str());
  }
  catch (LinguisticProcessingException& e) {
    // do nothing: not set in analyzeText (only in analyzeXmlDocuments)
  }

  uint64_t offsetIndexingNode(0);
  try {
    offsetIndexingNode=atoi(metadata->getMetaData("StartOffsetIndexingNode").c_str());
  }
  catch (LinguisticProcessingException& e) {
    // do nothing: not set in analyzeText (only in analyzeXmlDocuments)
  }

  std::string docId("");
  try {
    docId=metadata->getMetaData("DocId");
  }
  catch (LinguisticProcessingException& e) {
    // do nothing: not set in analyzeText (only in analyzeXmlDocuments)
  }

  out << "<relations docid=\"" << docId
      << "\" offsetNode=\"" << offsetIndexingNode 
      << "\">" << endl;

//   LDEBUG << "SemanticRelationsXmlLogger on graph " << m_graph << LENDL;
  
  //look at all vertices for annotations
  AnnotationGraphVertexIt itv, itv_end;
  boost::tie(itv, itv_end) = vertices(annotationData->getGraph());
  for (; itv != itv_end; itv++)
  {
    LDEBUG << "SemanticRelationsXmlLogger on annotation vertex " << *itv << LENDL;
    if (annotationData->hasAnnotation(*itv,Common::Misc::utf8stdstring2limastring("SemanticAnnotation")))
    {
//       LDEBUG << "    it has SemanticRelationAnnotation" << LENDL;
      const SemanticAnnotation* annot = 0;
      try
      {
        annot = annotationData->annotation(*itv,Common::Misc::utf8stdstring2limastring("SemanticAnnotation"))
          .pointerValue<SemanticAnnotation>();
      }
      catch (const boost::bad_any_cast& e)
      {
        SEMLOGINIT;
        LERROR << "This annotation is not a SemanticRelation" << LENDL;
        continue;
      }

      // output
      out << "<annotation type=\"" << annot->getType() << "\">" << endl
          << vertexStringForSemanticAnnotation("vertex",*itv,tokenMap,annotationData,offset)
          << "</annotation>" << endl;
    }
  }

  // look at all edges for relations
  AnnotationGraphEdgeIt it,it_end;
  const AnnotationGraph& annotGraph=annotationData->getGraph();
  boost::tie(it, it_end) = edges(annotGraph);
  for (; it != it_end; it++) {
    LDEBUG << "SemanticRelationsXmlLogger on annotation edge " 
           << source(*it,annotGraph) << "->" << target(*it,annotationData->getGraph()) << LENDL;
    if (annotationData->hasAnnotation(*it,Common::Misc::utf8stdstring2limastring("SemanticRelation")))
    {
      SEMLOGINIT;
      LDEBUG << "found semantic relation" << LENDL;
      const SemanticRelationAnnotation* annot = 0;
      try
      {
        annot = annotationData->annotation(*it,Common::Misc::utf8stdstring2limastring("SemanticRelation"))
          .pointerValue<SemanticRelationAnnotation>();
      }
      catch (const boost::bad_any_cast& e)
      {
        SEMLOGINIT;
        LERROR << "This annotation is not a SemanticAnnotation" << LENDL;
        continue;
      }

      //output
      out << "<relation type=\"" << annot->type() << "\">" << endl
          << vertexStringForSemanticAnnotation("source",source(*it,annotGraph),tokenMap,annotationData,offset)
          << vertexStringForSemanticAnnotation("target",target(*it,annotGraph),tokenMap,annotationData,offset)
          << "</relation>" << endl;
      
    }
  }

//   LDEBUG << "    all vertices done" << LENDL;
  out << "</relations>" << endl;
  out.close();

  TimeUtils::logElapsedTime("SemanticRelationsXmlLogger");
  return SUCCESS_ID;
}
int detect_L2(int argc, char* argv[]) {

    cout << "Testing L2 started" << endl;
    if(argc<3) {
        cerr << "Usage: HFTrainDetect config.txt 1 [image] [detection]" << endl;
        return -1;
    }

    // timer
    timeval start, end;
    gettimeofday(&start, NULL);
    double runtime=0;

    // read config file
    StructParam param;
    if(!param.loadConfigDetect_L2(argv[1])) {
        cerr << "Could not parse " << argv[1] << endl;
        exit(-1);
    }

    // load first layer forest
    HFForest_L1 forest_L1(&param);
    forest_L1.loadForest(param.treepath_L1);

    // load second layer forest
    HFForest_L2 forest_L2(&param);
    forest_L2.loadForest(param.treepath_L2);

    AnnotationData TestD;
    TestD.loadAnnoFile(param.test_file.c_str());

    // detect hypotheses on all images
    for(unsigned int i=0; i<TestD.AnnoData.size(); ++i) {

        // read image
        string fileName = param.image_path+"/"+TestD.AnnoData[i].image_name;
        string depthFileName = param.depth_image_path+"/"+TestD.AnnoData[i].image_name.substr(0,TestD.AnnoData[i].image_name.size()-4)+"_abs_smooth.png";
        Mat originImg = imread(fileName);
        Mat depthImg  = imread(depthFileName,CV_LOAD_IMAGE_ANYDEPTH);    // depthImg is UINT16

        // calculate leaf id maps using first layer forest
        cout<<"evaluating leafId maps"<<endl;
        vector<vector<vector<Mat> > > leafIdMaps;
        forest_L1.evaluateLeafIdMaps(originImg, depthImg, leafIdMaps);

//        // get the vote maps from the first layer
//        cout<<"evaluating L1 vote maps"<<endl;
//        vector<vector<Mat> > voteMaps_L1;
//        forest_L1.returnVoteMaps(originImg, depthImg, voteMaps_L1);

        // get the vote maps from the second layer
        cout<<"evaluating L2 vote maps"<<endl;
        vector<vector<Mat> > voteMaps_L2;
        forest_L2.returnVoteMaps(leafIdMaps,voteMaps_L2,originImg);

#if 0
        namedWindow("show",CV_WINDOW_AUTOSIZE);
        for(unsigned int aspIdx=0; aspIdx<param.asp_ratios.size(); ++aspIdx){
            for(unsigned int sclIdx=0; sclIdx<param.scales.size(); ++sclIdx){
                Mat show;
                voteMaps_L2[aspIdx][sclIdx].convertTo(show,CV_8U,255*0.05);
                imshow("show",show);
                waitKey(0);
            }
        }
#endif

        Hypotheses hyp;
        forest_L2.detect(hyp,voteMaps_L2);
//        hyp.save_detections((param.hypotheses_path+"/"+TestD.AnnoData[i].image_name+".txt").c_str());
//        hyp.show_detections(originImg,param.d_thres);

//        // pass leafIdMaps to second layer forest for detection
//        cout<<"evaluating combined detection"<<endl;
//        vector<Hypotheses> bigHyp;
//        forest_L2.detect(bigHyp, voteMaps_L1, voteMaps_L2);
//
//        // save detections
//        for(unsigned int hypIdx=0; hypIdx<bigHyp.size(); ++hypIdx){
//            char buffer[5];
//            sprintf(buffer,"%02d",hypIdx);
//            string strBuffer = buffer;
//            bigHyp[hypIdx].save_detections( (param.hypotheses_path+"/lambda"+strBuffer+"/"+TestD.AnnoData[i].image_name+".txt").c_str());
//        }
    }

    gettimeofday(&end, NULL);
    runtime = ( (end.tv_sec - start.tv_sec)*1000 + (end.tv_usec - start.tv_usec)/(1000.0) );
    cout << "Total runtime (L2 test): " << runtime << " msec" << endl;

    return 0;

}
int detect(int argc, char* argv[]) {

    cout << "Testing L1 started" << endl;
    if(argc<3) {
        cerr << "Usage: HFTrainDetect config.txt 1 [image] [detection]" << endl;
        return -1;
    }

    // read config file
    StructParam param;
    if(!param.loadConfigDetect(argv[1])) {
        cerr << "Could not parse " << argv[1] << endl;
        exit(-1);
    }

    // timer
    timeval start, end;
    double runtime;
    gettimeofday(&start, NULL);

    // load forest
    HFForest_L1 forest(&param);
    forest.loadForest(param.treepath_L1);


    AnnotationData TestD;
    TestD.loadAnnoFile(param.test_file.c_str());

    // detect hypotheses on all images
    for(unsigned int i=0; i<TestD.AnnoData.size(); ++i) {

        // read image
        Mat originImg = imread((param.image_path+"/"+TestD.AnnoData[i].image_name).c_str()); // originImg is UINT8_3Channel
        string depthFileName = param.depth_image_path+"/"+TestD.AnnoData[i].image_name.substr(0,TestD.AnnoData[i].image_name.size()-4)+"_abs_smooth.png";
        Mat depthImg  = imread(depthFileName,CV_LOAD_IMAGE_ANYDEPTH);    // depthImg is UINT16

        if(originImg.empty()) {
            cerr << "Could not read image file " << param.image_path << "/" << TestD.AnnoData[i].image_name << endl;
            continue;
        }

        // detect
        Hypotheses hyp;
        if(param.feature_path.empty()) {
            forest.detect(TestD.AnnoData[i].image_name,originImg,depthImg,hyp);
        }

        #if 0
        // evaluate
        Annotation train;
        hyp.check(TestD.AnnoData[i], param.d_thres, train);
        #endif

        // save detections
        hyp.save_detections( (param.hypotheses_path+"/"+TestD.AnnoData[i].image_name+".txt").c_str());

        #if 0
        // show detections
        hyp.show_detections(originImg, param.d_thres);
        #endif

    }

    gettimeofday(&end, NULL);
    runtime = ( (end.tv_sec - start.tv_sec)*1000 + (end.tv_usec - start.tv_usec)/(1000.0) );
    cout << "Total runtime (L1 test): " << runtime << " msec" << endl;

    return 0;

}
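The Hypotheses container shared by detect_L2() and detect() is not defined in these examples. A sketch of the interface they rely on, reconstructed from the call sites; the parameter types and the exact semantics of check() are assumptions:

// Sketch inferred from the call sites above; illustrative only.
class Hypotheses
{
public:
    // write the detections to a text file
    void save_detections(const char* filename);

    // display the detections scoring above the threshold on the image
    void show_detections(const cv::Mat& img, float threshold);

    // compare the detections against a ground-truth annotation, filling
    // 'train' (its exact role is not visible in these examples)
    void check(const Annotation& groundTruth, float threshold, Annotation& train);
};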
bool CreateIdiomaticAlternative::operator()(Automaton::RecognizerMatch& result,
                                            AnalysisContent& analysis) const
{
#ifdef DEBUG_LP
    MORPHOLOGINIT;
    LDEBUG << "CreateIdiomaticAlternative, match is " << result;
    LDEBUG << "    expression is " << (result.isContiguous()?"":"non") <<
     " contiguous and" << (result.isContextual()?" non":"") << " absolute";
#endif
  if (result.empty()) return false;
  const LinguisticAnalysisStructure::AnalysisGraph& graph = *(result.getGraph());
  AnnotationData* annotationData = static_cast< AnnotationData* >(analysis.getData("AnnotationData"));
  if (annotationData->dumpFunction("IdiomExpr") == 0)
  {
    annotationData->dumpFunction("IdiomExpr", new DumpIdiomaticExpressionAnnotation());
  }
  
  RecognizerData* recoData=static_cast<RecognizerData*>(analysis.getData("RecognizerData"));
  
  std::set<LinguisticGraphVertex> addedVertices;
  // initialize the vertices to clear

  if (result.isContiguous())
  {
//     MORPHOLOGINIT;
//      LDEBUG << "contiguous idiomatic expression found: "
//          << result.concatString();

    // only one part : terms in expression are adjacent -> easy part

    // check if there is an overlap first
    if (recoData->matchOnRemovedVertices(result))
    {
      // ignore current idiomatic expression, continue
      MORPHOLOGINIT;
      LWARN << "idiomatic expression ignored: " << Common::Misc::limastring2utf8stdstring(result.concatString())
          << ": overlapping with a previous one";
      return false;
    }

    // create the new token
    std::pair<Token*,MorphoSyntacticData*> newToken = createAlternativeToken(result);
    if (newToken.second->empty())
    {
      // ignore current idiomatic expression, continue
      MORPHOLOGINIT;
      LERROR << "CreateIdiomaticAlternative::operator() Got empty morphosyntactic data. Abort";
      delete newToken.first;
      delete newToken.second;
      return false;
    }

    // add the vertex
    LinguisticGraphVertex idiomaticVertex =
        addAlternativeVertex(newToken.first, newToken.second, const_cast<LinguisticGraph*>(graph.getGraph()));
    AnnotationGraphVertex agv =  annotationData->createAnnotationVertex();
    annotationData->addMatching("AnalysisGraph", idiomaticVertex, "annot", agv);
    annotationData->annotate(agv, Common::Misc::utf8stdstring2limastring("AnalysisGraph"), idiomaticVertex);
    IdiomaticExpressionAnnotation annot(result);
    GenericAnnotation ga(annot);
    annotationData->annotate(agv, Common::Misc::utf8stdstring2limastring("IdiomExpr"), ga);

    addedVertices.insert(idiomaticVertex);

    //create the alternative with this only vertex
    createBeginAlternative(result.front().getVertex(),
                            idiomaticVertex,const_cast<LinguisticGraph&>(*graph.getGraph()));
    attachEndOfAlternative(idiomaticVertex,
                            result.back().getVertex(),const_cast<LinguisticGraph&>(*graph.getGraph()));

    // if expression is not contextual, only keep alternative
    if (! result.isContextual())
    {
      recoData->storeVerticesToRemove(result,const_cast<LinguisticGraph*>(graph.getGraph()));
      removeEdges(const_cast<LinguisticGraph&>(*graph.getGraph()),
                 result, analysis);
      //recoData->setNextVertex(idiomaticVertex);
      // if match was on single token, use next vertices (to avoid loops)
      if (result.size() > 1) 
      {
        recoData->setNextVertex(idiomaticVertex);
      }
      else 
      {
        LinguisticGraphOutEdgeIt outItr,outItrEnd;
        boost::tie(outItr,outItrEnd) = out_edges(idiomaticVertex,*(graph.getGraph()));
        for (;outItr!=outItrEnd;outItr++) 
        {
          recoData->setNextVertex(target(*outItr, *(graph.getGraph())));
        }
      }
    }
  }
  else
  {
    // several parts : tough case
//     MORPHOLOGINIT;
//      LDEBUG << "non contiguous idiomatic expression found: "
//          << result.concatString();

    // check if there is an overlap first
    if (recoData->matchOnRemovedVertices(result))
    {
      // ignore current idiomatic expression, continue
      MORPHOLOGINIT;
      LWARN << "idiomatic expression ignored: " << Common::Misc::limastring2utf8stdstring(result.concatString())
          << ": overlapping with a previous one";
      return false;
    }

    // create the new token
    pair<Token*,MorphoSyntacticData*> newToken = createAlternativeToken(result);
    if (newToken.second->empty())
    {
      // ignore current idiomatic expression, continue
      MORPHOLOGINIT;
      LERROR << "CreateIdiomaticAlternative::operator() Got empty morphosyntactic data. Abort";
      delete newToken.first;
      delete newToken.second;
      return false;
    }

    // add the vertex
    LinguisticGraphVertex idiomaticVertex =
        addAlternativeVertex(newToken.first,newToken.second,const_cast<LinguisticGraph*>(graph.getGraph()));
    addedVertices.insert(idiomaticVertex);
    AnnotationGraphVertex agv =  annotationData->createAnnotationVertex();
    annotationData->addMatching("AnalysisGraph", idiomaticVertex, "annot", agv);
    annotationData->annotate(agv, Common::Misc::utf8stdstring2limastring("AnalysisGraph"), idiomaticVertex);
    IdiomaticExpressionAnnotation annot(result);
    GenericAnnotation ga(annot);
    annotationData->annotate(agv, Common::Misc::utf8stdstring2limastring("IdiomExpr"), ga);

    //create the alternative with this vertex and duplicate of other vertices
    deque<LinguisticGraphVertex> idiomAlternative;
    LinguisticGraphVertex headVertex=result.getHead();
#ifdef DEBUG_LP
   LDEBUG << "headVertex = " << headVertex;
    if (headVertex!=0) 
    {
      LDEBUG << "=> " << Common::Misc::limastring2utf8stdstring(get(vertex_token,*graph.getGraph(),headVertex)->stringForm());
    }
#endif
    bool foundHead=false;
    bool keeping = false;
    std::pair< LinguisticGraphVertex, LinguisticGraphVertex > idiomPartBounds;
    std::set< std::pair< LinguisticGraphVertex, LinguisticGraphVertex > > edgesToRemove;
    RecognizerMatch::const_iterator matchItr=result.begin();
    for (; matchItr!=result.end(); matchItr++)
    {
      if (!matchItr->isKept())
      {
        if (keeping)
        {
          RecognizerMatch::const_iterator prevItr = matchItr - 1;
          idiomPartBounds.second = prevItr->getVertex();
          keeping = false;
#ifdef DEBUG_LP
          LDEBUG << "adding " << idiomPartBounds.first << " -> " << idiomPartBounds.second << " in edgesToRemove";
#endif
          edgesToRemove.insert(idiomPartBounds);
        }
        // duplicate this vertex
#ifdef DEBUG_LP
        LDEBUG << "duplication of vertex " << matchItr->getVertex();;
#endif
        Token* token=get(vertex_token,*graph.getGraph(),matchItr->getVertex());
        MorphoSyntacticData* data = 
          new MorphoSyntacticData(*get(vertex_data,*graph.getGraph(),matchItr->getVertex()));
        LinguisticGraphVertex dupVx = add_vertex(const_cast<LinguisticGraph&>(*graph.getGraph()));
        put(vertex_token,const_cast<LinguisticGraph&>(*graph.getGraph()),dupVx,token);
        put(vertex_data,const_cast<LinguisticGraph&>(*graph.getGraph()),dupVx,data);
        idiomAlternative.push_back(dupVx);
        AnnotationGraphVertex agv =  annotationData->createAnnotationVertex();
        annotationData->addMatching("AnalysisGraph", dupVx, "annot", agv);
        annotationData->annotate(agv, Common::Misc::utf8stdstring2limastring("AnalysisGraph"), dupVx);
        std::set< LinguisticGraphVertex > annotMatches = 
          annotationData->matches("AnalysisGraph",matchItr->getVertex(),"annot");
        for (std::set< LinguisticGraphVertex >::const_iterator annotIt(annotMatches.begin());
              annotIt != annotMatches.end(); annotIt++)
        {
          std::set< std::string > excepted;
          excepted.insert("AnalysisGraph");
          annotationData->cloneAnnotations(*annotIt, agv, excepted);
        }
        addedVertices.insert(dupVx);
//         verticesToRemove.insert(matchItr->getVertex());
      }
      else
      {
        if (!keeping)
        {
          idiomPartBounds.first = matchItr->getVertex();
          keeping = true;
        }
#ifdef DEBUG_LP
         LDEBUG << "kept vertex " << matchItr->getVertex();
#endif
        if (matchItr->getVertex()==headVertex)
        {
          foundHead=true;
#ifdef DEBUG_LP
           LDEBUG << "add head vertex " << idiomaticVertex;
#endif
          idiomAlternative.push_back(idiomaticVertex);
        }
      }
    }
    if (!foundHead) 
    {
      MORPHOLOGINIT;
      LWARN << "head token has not been found in non contiguous expression. "
          << "Idiomatic token is placed first";
      idiomAlternative.push_front(idiomaticVertex);
    }
    if (keeping)
    {
      RecognizerMatch::const_iterator prevItr = matchItr - 1;
      idiomPartBounds.second = prevItr->getVertex();
      keeping = false;
#ifdef DEBUG_LP
      LDEBUG << "adding " << idiomPartBounds.first << " -> " << idiomPartBounds.second << " in edgesToRemove";
#endif
      edgesToRemove.insert(idiomPartBounds);
    }

    // link alternatives
#ifdef DEBUG_LP
     LDEBUG << "idiomAlternative has " << idiomAlternative.size() << " vertex";
#endif
    createBeginAlternative(result.front().getVertex(),
                            idiomAlternative.front(),const_cast<LinguisticGraph&>(*graph.getGraph()));
    {
      deque<LinguisticGraphVertex>::const_iterator idItr=idiomAlternative.begin();
      LinguisticGraphVertex lastIdiomVx=*idItr;
      idItr++;
      while (idItr!=idiomAlternative.end())
      {
        LinguisticGraphEdge newEdge;
        bool ok;
        boost::tie(newEdge, ok) = add_edge(lastIdiomVx,*idItr,const_cast<LinguisticGraph&>(*graph.getGraph()));
#ifdef DEBUG_LP
         LDEBUG << "added new edge in alternatives linking: " << newEdge.m_source << " -> " << newEdge.m_target;
#endif
        lastIdiomVx=*idItr;
        idItr++;
      }
    }
    attachEndOfAlternative(idiomAlternative.back(),
                            result.back().getVertex(),const_cast<LinguisticGraph&>(*graph.getGraph()));

    // if expression is not contextual, only keep alternative
    if (! result.isContextual())
    {
#ifdef DEBUG_LP
      LDEBUG << "expression is not contextual, only keep alternative";
#endif
      std::set< std::pair< LinguisticGraphVertex, LinguisticGraphVertex > >::const_iterator edgesToRemoveIt, edgesToRemoveIt_end;
      edgesToRemoveIt = edgesToRemove.begin(); edgesToRemoveIt_end = edgesToRemove.end();
      for (; edgesToRemoveIt != edgesToRemoveIt_end; edgesToRemoveIt++)
      {
#ifdef DEBUG_LP
         LDEBUG << "Removing edge " << (*edgesToRemoveIt).first << " -> " << (*edgesToRemoveIt).second;
#endif
        removeEdges(const_cast<LinguisticGraph&>(*graph.getGraph()),
                   result, analysis);
      }

//       recoData->storeVerticesToRemove(result,*graph);
      // no need to check size: if several parts, more than one vertex
      recoData->setNextVertex(idiomaticVertex);
      
    }
  }
  RecognizerMatch::const_iterator matchItr=result.begin();
  for (; matchItr!=result.end(); matchItr++)
  {
    recoData->clearUnreachableVertices( analysis, (*matchItr).getVertex());
  }
//   recoData->clearUnreachableVertices( analysis, result.front().getVertex(), result.back().getVertex(), storedEdges);
  return true;
}
      LimaStatusCode CorefSolvingNormalizedXmlLogger::process(
        AnalysisContent& analysis) const
      {
//         COREFSOLVERLOGINIT;
        TimeUtils::updateCurrentTime();
        AnnotationData* annotationData = static_cast<AnnotationData*>(analysis.getData("AnnotationData"));
        const LinguisticAnalysisStructure::AnalysisGraph& graph = *(static_cast<LinguisticAnalysisStructure::AnalysisGraph*>(analysis.getData(m_graph)));

//         LinguisticGraph* lingGraph = const_cast<LinguisticGraph*>(graph.getGraph());
        LinguisticMetaData* metadata=static_cast<LinguisticMetaData*>(analysis.getData("LinguisticMetaData"));
        if (metadata == 0)
        {
          COREFSOLVERLOGINIT;
          LERROR << "no LinguisticMetaData ! abort" << LENDL;
          return MISSING_DATA;
        }

        ofstream out;
        if (!openLogFile(out,metadata->getMetaData("FileName")))
        {
          COREFSOLVERLOGINIT;
          LERROR << "Can't open log file " << LENDL;
          return UNKNOWN_ERROR;
        }

        out << "<coreferences>" << endl;


        //   LDEBUG << "CorefSolvingNormalizedXmlLogger on graph " << m_graph << LENDL;
        AnnotationGraphVertexIt itv, itv_end;
        boost::tie(itv, itv_end) = vertices(annotationData->getGraph());
        for (; itv != itv_end; itv++)
        {
          // process
          //LDEBUG << "CorefSolvingNormalizedXmlLogger on annotation vertex " << *itv << LENDL;
          if (annotationData->hasAnnotation(*itv,utf8stdstring2limastring("Coreferent")))
          {
            CoreferentAnnotation* annot ;
            try
            {
              annot = annotationData->annotation(*itv,utf8stdstring2limastring("Coreferent"))
                      .pointerValue<CoreferentAnnotation>();
            }
            catch (const boost::bad_any_cast& )
            {
              COREFSOLVERLOGINIT;
              LERROR << "One annotation on vertex " << *itv << " you are trying to cast is not a Coreference; Coreference not logged" << LENDL;
              for (int i = 0; i < 19 ; i++)
              {
                LERROR << "annot "<< i << " : " << limastring2utf8stdstring(annotationData->annotationName(i)) << LENDL ;
              }
              continue;
            }
            LinguisticProcessing::LinguisticAnalysisStructure::Token* token = get(vertex_token, *graph.getGraph(), annot->morphVertex());
            if (token == 0)
            {
              COREFSOLVERLOGINIT;
              LERROR << "Vertex " << *itv << " has no entry in the analysis graph token map. This should not happen !!" << LENDL;
            }
            else
            {
              CoreferentAnnotation* antecedent = 0;
//               bool hasAntecedent = false;
              AnnotationGraphOutEdgeIt it, it_end;
              boost::tie(it, it_end) = boost::out_edges(static_cast<AnnotationGraphVertex>(*itv), annotationData->getGraph());

              for (; it != it_end; it++)
              {
                if (annotationData->hasAnnotation(target(*it,annotationData->getGraph()),utf8stdstring2limastring("Coreferent")))
                {
                  try
                  {
                    antecedent = annotationData->annotation(target(*it, annotationData->getGraph()), utf8stdstring2limastring("Coreferent")).pointerValue<CoreferentAnnotation>();
//                     hasAntecedent = true;
                  }
                  catch (const boost::bad_any_cast& )
                  {
                    COREFSOLVERLOGINIT;
                    LERROR << "One annotation on vertex you are trying to cast resulting from an edge out of " << *itv << " is not a Coreference; Coreference not logged" << LENDL;
                    continue;
                  }
                }
              }
              out << "  <reference>\n"
              << "    <pos>" << get(vertex_token,*graph.getGraph(),annot->morphVertex())->position() << "</pos>\n"
              << "    <len>" << token->stringForm().length() << "</len>\n"
              << "    <string>"<< limastring2utf8stdstring(transcodeToXmlEntities(token->stringForm())) << "</string>\n"
              << "    <npId>" << annot->id() << "</npId>\n"
              << "    <posVertex>" << annot->morphVertex() << "</posVertex>\n";
              //if (hasAntecedent)
              if (false)
              {
                out << "    <npRef>" << antecedent->id() << "</npRef>\n";
                out << "    <refPosVertex>" << antecedent->morphVertex() << "</refPosVertex>\n";
              }
              out << "    <categ>" << annot->categ() << "</categ>\n"
                    << "  </reference>\n"
              << endl;
            }
          }
        }
        out << "</coreferences>" << endl;
        out.close();

        TimeUtils::logElapsedTime("CorefSolvingNormalizedXmlLogger");
        return SUCCESS_ID;

      }
Example no. 17
0
LimaStatusCode EntityTracker::process(AnalysisContent& analysis) const
{
  TimeUtils::updateCurrentTime();
  SELOGINIT;

  LinguisticMetaData* metadata=static_cast<LinguisticMetaData*>(analysis.getData("LinguisticMetaData"));
  if (metadata == 0)
  {
    LERROR << "no LinguisticMetaData ! abort" << LENDL;
    return MISSING_DATA;
  }

  AnalysisGraph* anagraph=static_cast<AnalysisGraph*>(analysis.getData("AnalysisGraph"));
  if (anagraph==0)
  {
    LERROR << "no graph 'AnaGraph' available !" << LENDL;
    return MISSING_DATA;
  }

  AnnotationData* annotationData = static_cast< AnnotationData* >(analysis.getData("AnnotationData"));
  if (annotationData==0)
  {
    LERROR << "no annotation graph available !" << LENDL;
    return MISSING_DATA;
  }

  // add new data to store co-references
  CoreferenceData* corefData = new CoreferenceData;
  analysis.setData("CoreferenceData",corefData);
  
  CoreferenceEngine ref;
  LinguisticGraph* graph=anagraph->getGraph();
  LinguisticGraphVertex lastVertex=anagraph->lastVertex();
  LinguisticGraphVertex firstVertex=anagraph->firstVertex();

  std::queue<LinguisticGraphVertex> toVisit;
  std::set<LinguisticGraphVertex> visited;

  LinguisticGraphOutEdgeIt outItr,outItrEnd;

  // output vertices between begin and end,
  // but do not include begin (beginning of text or previous end of sentence) and include end (end of sentence)
  toVisit.push(firstVertex);

  bool first=true;
  bool last=false;
  while (!toVisit.empty()) {
    LinguisticGraphVertex v=toVisit.front();
    toVisit.pop();
    if (last || v == lastVertex) {
      continue;
    }
    if (v == lastVertex) {
      last=true;
    }

    for (boost::tie(outItr,outItrEnd)=out_edges(v,*graph); outItr!=outItrEnd; outItr++)
    {
      LinguisticGraphVertex next=target(*outItr,*graph);
      if (visited.find(next)==visited.end())
      {
        visited.insert(next);
        toVisit.push(next);
      }
    }

    if (first) {
      first=false;
    }
    else {
      // first, check if vertex corresponds to a specific entity
      std::set< AnnotationGraphVertex > matches = annotationData->matches("AnalysisGraph",v,"annot");
      for (std::set< AnnotationGraphVertex >::const_iterator it = matches.begin();
           it != matches.end(); it++)
      {
        AnnotationGraphVertex vx=*it;
        Token* t=get(vertex_token,*graph,vx);
        /* save all the vertices */
        if (t != 0)
        {
          //storeAllToken(t);
          //allToken.push_back(t);
          ref.storeAllToken(*t);
        }
        if (annotationData->hasAnnotation(vx, Common::Misc::utf8stdstring2limastring("SpecificEntity")))
        {
          /*const SpecificEntityAnnotation* se =
            annotationData->annotation(vx, Common::Misc::utf8stdstring2limastring("SpecificEntity")).
            pointerValue<SpecificEntityAnnotation>();*/
          //storeSpecificEntity(se);
          //Token* t=get(vertex_token,*graph,vx);
          //storedAnnotations.push_back(*t);
          ref.storeAnnot(*t);
          // std::cout << "the vertex named " << t->stringForm() << std::endl;
        }
      }
    }
  }

  /* look for coreferences between the previously detected named entities */

  vector<Token> vectTok;
  vector<Token>::const_iterator it1=ref.getAnnotations().begin(), it1_end=ref.getAnnotations().end();
  for (;
       it1 != it1_end;
       it1++)
  {
//     checkCoreference (*it1,ref);
    vectTok = ref.searchCoreference(*it1);
    if (vectTok.size() > 0)
    {
      corefData->push_back(vectTok);
    }
    ref.searchCoreference(*it1);
  }

  /* get the text */
//   LimaStringText* text=static_cast<LimaStringText*>(analysis.getData("Text"));
  
  return SUCCESS_ID;
}
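CoreferenceData, created and registered at the top of this function, is not defined in the example. setData() implies it derives from AnalysisData, and vectors of Token are pushed into it; a minimal sketch under those assumptions (the real LIMA class may store richer information):

// Sketch inferred from usage above; illustrative only.
class CoreferenceData :
    public AnalysisData,
    public std::vector< std::vector<LinguisticAnalysisStructure::Token> >
{
public:
  virtual ~CoreferenceData() {}
};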