void EventTemplateData::setTypeInCurrentTemplate(const std::string& type) { LOGINIT("LP::EventAnalysis"); LDEBUG << "set Current Template Type " << type; back().setType(type); LDEBUG << "bak.getType " << back().getType(); }
//*********************************************************************** void SpecificEntitiesLoader:: init(Common::XMLConfigurationFiles::GroupConfigurationStructure& unitConfiguration, Manager* manager) { LOGINIT("LP::SpecificEntities"); m_language=manager->getInitializationParameters().media; AnalysisLoader::init(unitConfiguration,manager); try { m_graph=unitConfiguration.getParamsValueAtKey("graph"); } catch (Common::XMLConfigurationFiles::NoSuchParam& ) {} // keep default value try { // may need to initialize a modex, to know about the entities in external file deque<string> modex=unitConfiguration.getListsValueAtKey("modex"); for (deque<string>::const_iterator it=modex.begin(),it_end=modex.end();it!=it_end;it++) { LDEBUG << "loader: initialize modex " << *it; string filename=Common::MediaticData::MediaticData::single().getConfigPath()+"/"+*it; Common::XMLConfigurationFiles::XMLConfigurationFileParser parser(filename); Common::MediaticData::MediaticData::changeable().initEntityTypes(parser); } } catch (Common::XMLConfigurationFiles::NoSuchList& ) { LWARN << "loader: no modex specified in parameter: types in file loaded may not be known"; } // Create a SAX parser object. m_parser = new QXmlSimpleReader(); }
void SegmentFeatureEntityInData:: update(const AnalysisContent& analysis) { // get result data const AnalysisData* resultData=analysis.getData(m_dataName); if (resultData == 0) { LOGINIT("LP::Segmentation"); LERROR << "no data " << m_data << "in AnalysisContent" << LENDL; } m_data=dynamic_cast<const ApplyRecognizer::RecognizerResultData*>(resultData); if (m_data == 0) { LOGINIT("LP::Segmentation"); LERROR << "data " << m_data << "in AnalysisContent is not a RecognizerResultData" << LENDL; } }
// Run an external program over the analysis: dump the analysis to a
// temporary file, execute the configured command line, then reload the
// program's output with the configured loader.
// Returns the first non-success status from the dumper or loader.
LimaStatusCode ExternalProcessUnit::process(AnalysisContent& analysis) const
{
  TimeUtils::updateCurrentTime();
  LOGINIT("LP:External");
  LINFO << "ExternalProcessUnit: start" << LENDL;
  LimaStatusCode returnCode(SUCCESS_ID);

  // produce temporary file with the given dumper
  LDEBUG << "ExternalProcessUnit: write tmp file" << LENDL;
  returnCode = m_dumper->process(analysis);
  if (returnCode != SUCCESS_ID)
  {
    LERROR << "ExternalProcessUnit: failed to dump data to temporary file" << LENDL;
    return returnCode;
  }

  // apply command line
  LDEBUG << "ExternalProcessUnit: apply external program" << LENDL;
  // fixed: the exit status of the external command was silently ignored;
  // log a failure so problems are visible (the loader below will then
  // report missing/invalid output if the command produced none)
  int exitStatus = QProcess::execute(m_commandLine.c_str());
  if (exitStatus != 0)
  {
    LERROR << "ExternalProcessUnit: external program failed with exit status " << exitStatus << LENDL;
  }

  // load results from the external program with the given loader
  LDEBUG << "ExternalProcessUnit: read results" << LENDL;
  returnCode = m_loader->process(analysis);
  if (returnCode != SUCCESS_ID)
  {
    LERROR << "ExternalProcessUnit: failed to load data from temporary file" << LENDL;
    return returnCode;
  }

  TimeUtils::logElapsedTime("ExternalProcessUnit");
  return returnCode;
}
// Serialize this paragraph to a binary stream: position, length, the
// event-entity pair (entity type id + group id, then each entity), and
// finally the map of other entities keyed by entity type.
// NOTE: the write order here defines the on-disk format and must mirror
// the corresponding read routine exactly.
void EventParagraph::write(std::ostream& file) const
{
  LOGINIT("LP::EventAnalysis");
  LDEBUG << "EventParagraph::write().."<< LENDL;
  Common::Misc::writeCodedInt(file,m_position);
  Common::Misc::writeCodedInt(file,m_length);
  LDEBUG << "EventParagraph::write: write typeId of m_evententities..." << m_evententities.first.getTypeId() << LENDL;
  Common::Misc::writeCodedInt(file,m_evententities.first.getTypeId());
  LDEBUG << "EventParagraph::write: write groupId of m_evententities..." << m_evententities.first.getGroupId() << LENDL;
  Common::Misc::writeCodedInt(file,m_evententities.first.getGroupId());
  // number of event entities, then each entity serializes itself
  Common::Misc::writeCodedInt(file,m_evententities.second.size());
  for(std::vector<Entity*>::const_iterator iT=m_evententities.second.begin();iT!=m_evententities.second.end();iT++)
  {
    LDEBUG << "EventParagraph::write m_evententities"<< LENDL;
    (*iT)->write(file);
  }
  LDEBUG << "EventParagraph::write size of m_otherentities (" << m_otherentities.size() << ")...."<< LENDL;
  // number of (entity type -> entities) entries in the map
  Common::Misc::writeCodedInt(file,m_otherentities.size());
  for(std::map<EntityType,std::vector<Entity *> >::const_iterator iT=m_otherentities.begin(); iT!=m_otherentities.end();iT++)
  {
    LDEBUG << "EventParagraph::write m_otherentity...."<< LENDL;
    // entity type (type id + group id), then the count and the entities themselves
    Common::Misc::writeCodedInt(file,(*iT).first.getTypeId());
    Common::Misc::writeCodedInt(file,(*iT).first.getGroupId());
    Common::Misc::writeCodedInt(file,(*iT).second.size());
    for(std::vector<Entity *>::const_iterator iT2 =(*iT).second.begin();iT2!=(*iT).second.end();iT2++)
    {
      (*iT2)->write(file);
    }
  }
}
// Visit one graph edge: if the target vertex carries a "WordSense"
// annotation, dump it as XML; otherwise dump the token's surface string.
// A trailing space separates consecutive outputs.
void DumpXMLAnnotationVisitor::examine_edge(LinguisticGraphEdge e,
                                            const LinguisticGraph& g)
{
  LinguisticGraphVertex v = target(e, g);
  // process
  if (m_ad->hasAnnotation(v, Common::Misc::utf8stdstring2limastring("WordSense")))
  {
    GenericAnnotation ga = (m_ad->annotation(v,utf8stdstring2limastring("WordSense")));
    Lima::LinguisticProcessing::WordSenseDisambiguation::WordSenseAnnotation wsa;
    try
    {
      wsa = ga.value<Lima::LinguisticProcessing::WordSenseDisambiguation::WordSenseAnnotation>();
      wsa.outputXml(m_ostream,g);
    }
    // fixed: the caught exception was named 'e', shadowing the edge
    // parameter 'e'; the name was unused, so drop it
    catch (const boost::bad_any_cast& )
    {
      LOGINIT("WordSenseDisambiguator");
      LERROR << "non word sense annotation";
    }
  }
  else
  {
    Token* token = get(vertex_token, g, v);
    if (token != 0)
    {
      std::string s = Common::Misc::limastring2utf8stdstring(token->stringForm());
      m_ostream << s;
    }
  }
  m_ostream << " ";
}
/* Initialize the Direct3D layer: obtain the IDirect3D interface from the
 * already-created DirectDraw object (_lpDD), enumerate the 3D devices
 * usable by Direct3D, and remember the GUID of the selected device in
 * _3DGUID. Registers d3d_close() with atexit() for automatic cleanup.
 * Returns TRUE on success, FALSE on failure.
 * NOTE(review): on QueryInterface failure, d3d_handle_error(res) is
 * returned as a BOOL -- presumably it returns FALSE; confirm. */
BOOL d3d_init(void)
{
  HRESULT res;
  GUID guid;

  /* must not be called twice without an intervening d3d_close() */
  Assert(d3d_initialized == FALSE);
  LOGINIT("d3d.log");
  atexit(d3d_close);

  // initialize Direct3d interface
  res = IDirectDraw_QueryInterface(_lpDD, &IID_IDirect3D, &_lpD3D);
  if (res != DD_OK)
    return d3d_handle_error(res);

  // enumerate all 3d devices useable by Direct3d
  if (d3d_enum_devices(&guid))
    return FALSE;
  memcpy(&_3DGUID, &guid, sizeof(GUID));

  d3d_initialized = TRUE;
  return TRUE;
}
// Machine reset handler: re-point the boot-vector handler at ROM so the
// CPU fetches its reset vectors from the ROM image after a reset.
void cpu30_state::machine_reset ()
{
  LOGINIT(("%s\n", FUNCNAME));

  /* Reset pointer to bootvector in ROM for bootvector handler bootvect_r */
  if (m_sysrom == &m_sysram[0]) /* Condition needed because memory map is not setup first time */
    m_sysrom = (uint32_t*)(memregion ("roms")->base () + 0x800000);
}
// Process-unit entry point: parse the XML input file describing specific
// entities and insert them into the target analysis graph (m_graph).
// Returns MISSING_DATA when the target graph is absent; XML parse errors
// are logged but the unit still returns SUCCESS_ID.
LimaStatusCode SpecificEntitiesLoader:: process(AnalysisContent& analysis) const
{
  // get analysis graph
  AnalysisGraph* graph=static_cast<AnalysisGraph*>(analysis.getData(m_graph));
  if (graph==0)
  {
    LOGINIT("LP::SpecificEntities");
    LERROR << "no graph '" << m_graph << "' available !";
    return MISSING_DATA;
  }
  //create a RecognizerData (such as in ApplyRecognizer) to be able to use
  //CreateSpecificEntity actions
  // ownership: recoData is registered in the analysis content under
  // "RecognizerData" and removed again at the end of this function;
  // resultData is owned by recoData once setResultData is called
  RecognizerData* recoData=new RecognizerData;
  analysis.setData("RecognizerData",recoData);
  RecognizerResultData* resultData=new RecognizerResultData(m_graph);
  recoData->setResultData(resultData);
  try
  {
    // SAX handler adds entities to the graph as elements are read
    SpecificEntitiesLoader::XMLHandler handler(m_language,analysis,graph);
    m_parser->setContentHandler(&handler);
    m_parser->setErrorHandler(&handler);
    QFile file(getInputFile(analysis).c_str());
    if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
      throw XMLException();
    if (!m_parser->parse( QXmlInputSource(&file)))
    {
      throw XMLException();
    }
  }
  catch (const XMLException& )
  {
    LOGINIT("LP::SpecificEntities");
    LERROR << "Error: failed to parse XML input file";
  }
  // remove recognizer data (used only internally to this process unit)
  recoData->deleteResultData(); // frees resultData as well
  resultData=0;
  analysis.removeData("RecognizerData");
  return SUCCESS_ID;
}
bool SpecificEntitiesLoader::XMLHandler::fatalError(const QXmlParseException& e) { LOGINIT("LP::SpecificEntities"); LWARN << "Warning at file " << toString(e.systemId()) << ", line " << e.lineNumber() << ", char " << e.columnNumber() << " Message: " << toString(e.message()); return false; }
bool SpecificEntitiesLoader::XMLHandler::warning(const QXmlParseException& e) { LOGINIT("LP::SpecificEntities"); LERROR << "Error at file " << toString(e.systemId()) << ", line " << e.lineNumber() << ", char " << e.columnNumber() << " Message: " << toString(e.message()); return true; }
void SegmentFeatureEntity:: update(const AnalysisContent& analysis) { m_annotationData = static_cast<const AnnotationData*>(analysis.getData("AnnotationData")); if (m_annotationData==0) { LOGINIT("LP::Segmentation"); LERROR << "no annotation graph available !" << LENDL; } }
//---------------------------------------------------------------------- void EventTemplateDefinitionResource:: init(GroupConfigurationStructure& unitConfiguration, Manager* manager) { LOGINIT("LP::EventAnalysis"); m_language=manager->getInitializationParameters().language; string resourcesPath=Common::MediaticData::MediaticData::single().getResourcesPath(); EventTemplateStructure structure; // get name try { string name = unitConfiguration.getParamsValueAtKey("templateName"); structure.setName(name); } catch (NoSuchParam& ) { LERROR << "No param 'templateName' in EventTemplateDefinitionResource for language " << (int)m_language; throw InvalidConfiguration(); } // get template elements: role and entity types try { map<string,string> elts = unitConfiguration.getMapAtKey("templateElements"); for(map<string,string>::const_iterator it=elts.begin(),it_end=elts.end();it!=it_end;it++) { structure.addTemplateElement((*it).first,(*it).second); } } catch (NoSuchParam& ) { LERROR << "No param 'templateName' in EventTemplateDefinition for language " << (int)m_language; throw InvalidConfiguration(); } // get element mapping, for template merging try { map<string,string> mapping = unitConfiguration.getMapAtKey("elementMapping"); for(map<string,string>::const_iterator it=mapping.begin(),it_end=mapping.end();it!=it_end;it++) { const std::string& elements=(*it).second; // comma-separated list of elements boost::char_separator<char> sep(",; "); boost::tokenizer<boost::char_separator<char> > tok(elements,sep); for(boost::tokenizer<boost::char_separator<char> >::iterator e=tok.begin(),e_end=tok.end(); e!=e_end;e++) { LDEBUG << "EventTemplateDefinitionResource: add mapping " << (*it).first << ":" << *e; m_elementMapping[(*it).first].insert(*e); } } } catch (NoSuchParam& ) { LDEBUG << "No param 'elementMapping' in EventTemplateDefinition for language " << (int)m_language; } m_templates.push_back(structure); }
// Dump the event template data named m_eventData as Events to the
// configured output stream. Returns MISSING_DATA if the annotation graph
// is absent or the named data has the wrong type.
LimaStatusCode EventTemplateDataDumper::process(AnalysisContent& analysis) const
{
  LOGINIT("LP::EventAnalysis");
  LDEBUG << "EventTemplateDataDumper::process" << LENDL;
  TimeUtils::updateCurrentTime();

  // initialize output
  DumperStream* dstream=AbstractTextualAnalysisDumper::initialize(analysis);
  ostream& out=dstream->out();

  const AnnotationData* annotationData = static_cast< AnnotationData* >(analysis.getData("AnnotationData"));
  if (annotationData==0)
  {
    LERROR << "no annotation graph available !" << LENDL;
    delete dstream; // fixed: dstream was leaked on this early return
    return MISSING_DATA;
  }
  if (! m_eventData.empty())
  {
    const AnalysisData* data =analysis.getData(m_eventData);
    if (data!=0)
    {
      // see if the data is of type Events
      const EventTemplateData* eventData=dynamic_cast<const EventTemplateData*>(data);
      if (eventData==0)
      {
        LOGINIT("LP::EventAnalysis");
        LERROR << "data '" << m_eventData << "' is neither of type EventData nor Events" << LENDL;
        delete dstream; // fixed: dstream was leaked on this early return
        return MISSING_DATA;
      }
      else
      {
        Events *events=eventData->convertToEvents(annotationData);
        events->write(out);
        delete events; // fixed: temporary Events object was leaked
      }
    }
    else
    {
      LOGINIT("LP::EventAnalysis");
      LERROR << "no data of name " << m_eventData << LENDL;
    }
  }
  delete dstream;
  TimeUtils::logElapsedTime("EventTemplateDataDumper");
  return SUCCESS_ID;
}
// Return the mention of the template whose name is 'name', or a reference
// to a static empty string when no template matches.
const std::string& EventTemplateDefinitionResource::getMention (const std::string name) const
{
  static std::string mention="";
  LOGINIT("LP::EventAnalysis");
  LDEBUG << "getMention m_templates.size() " << m_templates.size();
  for(std::vector<EventTemplateStructure>::const_iterator it=m_templates.begin();it!=m_templates.end();it++)
  {
    // fixed: typo "Cuurent" in debug message
    LDEBUG << "Current Mention " << it->getMention();
    if (name.compare(it->getName())==0)
      return it->getMention();
  }
  return mention;
}
const std::map<std::string,Common::MediaticData::EntityType>& EventTemplateDefinitionResource::getStructure (const std::string name) const { static std::map<std::string,Common::MediaticData::EntityType> structure; LOGINIT("LP::EventAnalysis"); LDEBUG << "getMention m_templates.size() " << m_templates.size(); for(std::vector<EventTemplateStructure>::const_iterator it=m_templates.begin();it!=m_templates.end();it++) { //LDEBUG << "Cuurent Mention " << it->getMention(); if (name.compare(it->getName())==0) return it->getStructure(); } return structure; }
void EventTemplateStructure::addTemplateElement(const std::string& role, const std::string entityType) { if (m_structure.find(role)!=m_structure.end()) { LOGINIT("LP::EventAnalysis"); LERROR << "In event " << m_name << ", element '"<< role <<"' is defined twice" ; } else { Common::MediaticData::EntityType type= Common::MediaticData::MediaticData::single().getEntityType(Common::Misc::utf8stdstring2limastring(entityType)); m_structure[role]=type; } }
void SegmentFeatureInSegment:: update(const AnalysisContent& analysis) { const AnalysisData* data=analysis.getData(m_segmentData); if (data==0) { LOGINIT("LP::Segmentation"); LERROR << SegmentFeatureInSegment_ID << ": No data " << m_segmentData << LENDL; m_data=0; } else { m_data=static_cast<const SegmentationData*>(data); } }
//*********************************************************************** // xerces XML handler SpecificEntitiesLoader::XMLHandler::XMLHandler(MediaId language, AnalysisContent& analysis, AnalysisGraph* graph): m_language(language), m_analysis(analysis), m_graph(graph), m_position(0), m_length(0), m_type(), m_string(), m_currentElement() { LOGINIT("LP::SpecificEntities"); LDEBUG << "SpecificEntitiesLoader::XMLHandler constructor"; }
// Datas are extracted from word sense annotations and written on the xml file according to the given dtd format LimaStatusCode WordSenseXmlLogger::process( AnalysisContent& analysis) const { TimeUtils::updateCurrentTime(); LinguisticMetaData* metadata=static_cast<LinguisticMetaData*>(analysis.getData("LinguisticMetaData")); if (metadata == 0) { LOGINIT("WordSenseDisambiguator"); LERROR << "no LinguisticMetaData ! abort"; return MISSING_DATA; } string textFileName = metadata->getMetaData("FileName"); string outputFile = textFileName + m_outputSuffix; ofstream out(outputFile.c_str(), std::ofstream::binary); if (!out.good()) { throw runtime_error("can't open file " + outputFile); } AnalysisGraph* /*anagraph=static_cast<AnalysisGraph*>(analysis.getData("SimpleGraph")); if (anagraph==0)*/ anagraph=static_cast<AnalysisGraph*>(analysis.getData("PosGraph")); if (anagraph==0) { LOGINIT("WordSenseDisambiguator"); LERROR << "no AnalysisGraph ! abort"; return MISSING_DATA; } dump(out, anagraph,/* static_cast<SyntacticData*>(analysis.getData("SyntacticData")),*/ static_cast<AnnotationData*>(analysis.getData("AnnotationData"))); out.flush(); out.close(); TimeUtils::logElapsedTime("WordSenseDisambiguatorXmlLogger"); return SUCCESS_ID; }
//*********************************************************************** void SegmentationDataXmlLogger::init( Common::XMLConfigurationFiles::GroupConfigurationStructure& unitConfiguration, Manager* manager) { LOGINIT("LP::Segmentation"); LDEBUG << "SegmentationDataXmlLogger::init" << LENDL; AbstractLinguisticLogger::init(unitConfiguration,manager); try { m_data=unitConfiguration.getParamsValueAtKey("data"); } catch (Common::XMLConfigurationFiles::NoSuchParam& ) {} // do nothing, keep default }
// Output one <entityOccurrence> XML element for the entity at vertex v:
// position/length attributes always; string and normalized form ("value"
// feature of an attached SpecificEntity annotation) when available.
void EventTemplateDataXmlLogger::outputEntity(std::ostream& out,
                                              const LinguisticAnalysisStructure::AnalysisGraph* graph,
                                              LinguisticGraphVertex v,
                                              const AnnotationData* annotationData) const
{
  LinguisticAnalysisStructure::Token* token=get(vertex_token, *(graph->getGraph()), v);
  if (token==0)
  {
    LOGINIT("LP::EventAnalysis");
    LWARN << "EventTemplateDataXmlLogger: no token for vertex " << v;
    return;
  }
  out << " <entityOccurrence"
      << " pos=\"" << token->position() << "\""
      << " len=\"" << token->length() << "\"";
  string str(""),norm("");
  str=xmlString(Common::Misc::limastring2utf8stdstring(token->stringForm()));
  // check if vertex corresponds to a specific entity found
  std::set< AnnotationGraphVertex > matches = annotationData->matches(graph->getGraphId(),v,"annot");
  for (std::set< AnnotationGraphVertex >::const_iterator it = matches.begin(); it != matches.end(); it++)
  {
    AnnotationGraphVertex vx=*it;
    if (annotationData->hasAnnotation(vx, Common::Misc::utf8stdstring2limastring("SpecificEntity")))
    {
      const SpecificEntityAnnotation* se =
        annotationData->annotation(vx, Common::Misc::utf8stdstring2limastring("SpecificEntity")).
        pointerValue<SpecificEntityAnnotation>();
      // the normalized form is the "value" feature of the entity
      const Automaton::EntityFeatures& features=se->getFeatures();
      for (Automaton::EntityFeatures::const_iterator featureItr=features.begin(),features_end=features.end();
           featureItr!=features_end; featureItr++)
      {
        if (featureItr->getName()=="value")
        {
          norm=xmlString(featureItr->getValueString());
        }
      }
    }
  }
  if (! str.empty())
  {
    out << " string=\"" << str << "\"";
    out << " norm=\"" << norm << "\"";
    out << ">" << str ;
    out << "</entityOccurrence>" << endl;
  }
  else
  {
    // fixed: the start tag was never closed when str was empty (the '>'
    // was only written in the non-empty branch), producing malformed XML;
    // emit a self-closed element instead
    out << "/>" << endl;
  }
}
bool SpecificEntitiesLoader::XMLHandler::endElement(const QString & namespaceURI, const QString & eltName, const QString & qName) { LIMA_UNUSED(namespaceURI); LIMA_UNUSED(qName); //LOGINIT("LP::SpecificEntities"); //LDEBUG << "SpecificEntitiesLoader::XMLHandler end element " << toString(eltName); string name=toString(eltName); if (name=="specific_entity") { LOGINIT("LP::SpecificEntities"); LDEBUG << "SpecificEntitiesLoader::XMLHandler add SE " << m_type << "," << m_position << "," << m_length << "," << m_graph; addSpecificEntity(m_analysis, m_graph, m_string, m_type, m_position, m_length); } // no more current element m_currentElement=""; return true; }
INPUT_PORTS_END

/* Start it up */
// machine_start: register save-state items, install the working RAM into
// the CPU address space, and point the boot-vector handler at ROM.
void cpu30_state::machine_start ()
{
  LOGINIT(("%s\n", FUNCNAME));

  // NOTE(review): save_pointer is passed sizeof(m_sysrom)/sizeof(m_sysram),
  // i.e. the byte size of the member itself rather than an element count --
  // looks suspicious; confirm against MAME's save_pointer semantics.
  save_pointer (NAME (m_sysrom), sizeof(m_sysrom));
  save_pointer (NAME (m_sysram), sizeof(m_sysram));

  /* setup ram */
  // NOTE(review): RAM is installed starting at 0x08, presumably to keep the
  // initial vector area mapped elsewhere -- confirm against the memory map.
  m_maincpu->space(AS_PROGRAM).install_ram(0x08, m_ram->size() - 1, m_ram->pointer());

  /* Setup pointer to bootvector in ROM for bootvector handler bootvect_r */
  m_sysrom = (uint32_t*)(memregion ("roms")->base () + 0x800000);
}
// Build an Entity for vertex v carrying "role" and "eventId" features.
// If the vertex bears a SpecificEntity annotation, that annotation's
// features are copied and augmented; otherwise minimal features are built
// from the token ("value" = surface string). Returns a heap-allocated
// Entity owned by the caller, or 0 when the vertex has no token.
Entity* EventTemplateData::
createEntity(const LinguisticAnalysisStructure::AnalysisGraph* graph,
             LinguisticGraphVertex v,
             const AnnotationData* annotationData,
             const std::string& role,
             uint64_t eventId) const
{
  LinguisticAnalysisStructure::Token* token=get(vertex_token, *(graph->getGraph()), v);
  if (token==0)
  {
    LOGINIT("LP::EventAnalysis");
    LWARN << "EventTemplateDataXmlLogger: no token for vertex " << v;
    return 0;
  }
  // store eventId feature as string (for output in WebLab)
  ostringstream eid;
  eid << eventId;
  // check if vertex corresponds to a specific entity found
  std::set< AnnotationGraphVertex > matches = annotationData->matches(graph->getGraphId(),v,"annot");
  for (std::set< AnnotationGraphVertex >::const_iterator it = matches.begin(); it != matches.end(); it++)
  {
    AnnotationGraphVertex vx=*it;
    if (annotationData->hasAnnotation(vx, Common::Misc::utf8stdstring2limastring("SpecificEntity")))
    {
      const SpecificEntityAnnotation* se =
        annotationData->annotation(vx, Common::Misc::utf8stdstring2limastring("SpecificEntity")).
        pointerValue<SpecificEntityAnnotation>();
      // copy of the annotation's features, augmented with role/eventId
      Automaton::EntityFeatures features=se->getFeatures();
      features.setFeature("role",role);
      features.setFeature("eventId",eid.str());
      Entity* e=new Entity(token->position(),token->length(),features);
      e->setMain(true);
      return e;
    }
  }
  // otherwise, have to create entity features
  Automaton::EntityFeatures features;
  features.setFeature("value",token->stringForm());
  features.setFeature("role",role);
  features.setFeature("eventId",eid.str());
  Entity* e=new Entity(token->position(),token->length(),features);
  e->setMain(true);
  return e;
}
void SegmentFeaturePosition:: update(const AnalysisContent& analysis) { // update offset from metadata const LinguisticMetaData* metadata=static_cast<const LinguisticMetaData*>(analysis.getData("LinguisticMetaData")); if (metadata == 0) { LOGINIT("LP::Segmentation"); LWARN << "no LinguisticMetaData ! abort" << LENDL; } else { try { m_offset=atoi(metadata->getMetaData("StartOffset").c_str()); } catch (LinguisticProcessingException& ) { // do nothing: not set in analyzeText (only in analyzeXmlDocuments) } } }
// Construct a WordUnit for 'lemma': resolve its id from lemma2Index (a
// warning is logged when absent and m_lemmaId keeps its default), then
// load its senses from sensesPath via the searcher.
WordUnit::WordUnit(KnnSearcher* searcher,
                   const Lemma2Index& lemma2Index,
                   const Index2Lemma& index2Lemma,
                   string lemma,
                   Mode mode,
                   string sensesPath) : m_lemma(lemma), m_mode(mode)
{
  LOGINIT("WordSenseDisambiguator");
  // fixed: the map was searched twice (find + find->second); do one lookup
  Lemma2Index::const_iterator lemmaIt = lemma2Index.find(lemma);
  if (lemmaIt != lemma2Index.end())
  {
    m_lemmaId = lemmaIt->second;
  }
  else
  {
    LWARN << "no lemmaId for "<< lemma << " in lemma2Index ";
  }
  m_nbSenses = loadSenses(searcher, lemma2Index, index2Lemma, sensesPath);
}
//*********************************************************************** void EventTemplateDataXmlLogger::init(Common::XMLConfigurationFiles::GroupConfigurationStructure& unitConfiguration, Manager* manager) { LOGINIT("LP::EventAnalysis"); LDEBUG << "EventTemplateDataXmlLogger::init"; AbstractTextualAnalysisDumper::init(unitConfiguration,manager); try { m_eventData=unitConfiguration.getParamsValueAtKey("eventTemplateData"); } catch (Common::XMLConfigurationFiles::NoSuchParam& ) { LDEBUG << "EventTemplateDataXmlLogger: no parameter 'eventTemplateData', use default ('"<<m_eventData << "')"; // not an error, keep default } }
// Logger stress test: emit messages at every severity level in a tight loop.
int main()
{
  LOGINIT("./", "test.log", DDEBUG);
  // fixed: passing a uint64_t to the "%d" conversion is undefined behavior;
  // the loop bound fits in int, so cast explicitly at each call
  for (uint64_t i = 0; i < 99999999; i++)
  {
    LOGDDEBUG("DDEBUG:");
    LOGDDEBUG("%d", (int)i);
    LOGDEBUG("%d", (int)i);
    LOGINFO("%d", (int)i);
    LOGWARNING("%d", (int)i);
    LOGERROR("%d", (int)i);
  }
}
//*********************************************************************** SegmentFeatureEntityInData::SegmentFeatureEntityInData(MediaId language, const std::string& complement): AbstractSegmentFeatureExtractor(language,complement), m_entityName(), m_dataName(), m_entityType() { // complement contains the name of the entity + the name of the data // in which it is stored (separated by a comma) string::size_type i=complement.find(","); if (i==string::npos) { LOGINIT("LP::Segmentation"); LERROR << "No data name provided for entityInData" << LENDL; m_entityName=complement; } else { m_entityName=string(complement,0,i); m_dataName=string(complement,i+1); } m_entityType=Common::MediaticData::MediaticData::single(). getEntityType(Common::Misc::utf8stdstring2limastring(m_entityName)); }