int PennTreebankTTCompat(char *tag, char *feat) { int pos, number, tense; pos = FeatureGet(feat, FT_POS); if (!FeatureMatch(PennPOS(tag), pos)) { if (streq(tag, "WRB") && pos == F_INTERJECTION) { /* This can be OK. cf <WRB> <Why>.<·Uz¸> (explanation-request) */ } else { return(0); } } if (streq(tag, "CD")) { if (pos != F_NOUN && pos != F_ADJECTIVE) return(0); } else if (streq(tag, "IN")) { if (pos != F_PREPOSITION && pos != F_CONJUNCTION) return(0); } number = FeatureGet(feat, FT_NUMBER); if (streq(tag, "NN") && number != F_SINGULAR) return(0); if (streq(tag, "NNS") && number != F_PLURAL) return(0); if (streq(tag, "NNP") && number != F_SINGULAR) return(0); if (streq(tag, "NNPS") && number != F_PLURAL) return(0); tense = FeatureGet(feat, FT_TENSE); if (streq(tag, "VB") && tense != F_INFINITIVE) return(0); if (streq(tag, "VBD") && tense != F_IMPERFECT) return(0); if (streq(tag, "VBG") && tense != F_PRESENT_PARTICIPLE) return(0); if (streq(tag, "VBN") && tense != F_PAST_PARTICIPLE) return(0); if (streq(tag, "VBP") && tense != F_PRESENT) return(0); if (streq(tag, "VBZ") && tense != F_PRESENT) return(0); /* The VBP/VBZ distinction appears to be error-prone */ /* comparative/superlative not used yet */ return(1); }
/* Search <image> for every sprite of every feature in <features> and
 * return the list of matches found.
 *
 * Features with maxCount == 1 use a single best-match search per sprite
 * (a match is kept when its score is positive); all other features
 * collect every match above the sprite's detection threshold.
 * Timing and per-feature results are reported via qDebug().
 */
const std::list<FeatureMatch> match_features(const cv::Mat &image,
                                             const std::list<const Feature*> &features)
{
    QTime a;
    int i = 0;  // total number of template searches performed
    a.start();

    std::list<FeatureMatch> feature_matches;
    for (const Feature *feature : features) {
        int found = 0;
        if (feature->maxCount == 1) {
            for (const Sprite &sprite : feature->sprites) {
                i++;
                const Match m = FindBestMatch(image,
                                              MatchTemplate(sprite.img, sprite.mask),
                                              CV_TM_CCORR_NORMED);
                if (m.value > 0) {
                    feature_matches.push_back(FeatureMatch(feature, &sprite, m));
                    found++;
                }
            }
            if (!found) {
                qDebug() << "Not found" << feature->humanName;
            }
        } else {
            // BUGFIX: removed a redundant inner `int found = 0;` that
            // shadowed the loop-scope counter declared above.
            for (const Sprite &sprite : feature->sprites) {
                // FIXME: limited maxCount
                i++;
                const std::list<Match> matches =
                    FindAllMatches(image,
                                   MatchTemplate(sprite.img, sprite.mask),
                                   CV_TM_CCORR_NORMED,
                                   sprite.detectionThreshold);
                for (const Match &m : matches) {
                    feature_matches.push_back(FeatureMatch(feature, &sprite, m));
                }
                found += matches.size();
            }
            qDebug() << "Found" << found << "of" << feature->humanName;
        }
    }

    // BUGFIX: guard the per-search average — when no searches ran
    // (empty feature list or sprite lists), `a.elapsed() / i` was an
    // integer division by zero (undefined behavior).
    const int elapsed = a.elapsed();
    qDebug() << "elapsed" << elapsed << "per search" << (i > 0 ? elapsed / i : 0);
    return feature_matches;
}
/* Score the attachment of subject <x> to verb phrase <w> (language <lang>)
 * during parsing. Returns 0.0 to reject the parse, 1.0 for a fully
 * acceptable attachment, or an intermediate score for dispreferred but
 * possible attachments.
 */
Float Syn_ParseFilterXW_Z(PNode *x, PNode *w, int lang)
{
  int noun_gender, noun_number, noun_person, verb_tense;
  Obj *cas;
  PNode *auxverb, *mainverb;
  /* A bare relative pronoun cannot take a subject here. */
  if (Syn_Parse_IsOnlyRelativeW(w, lang)) return(0.0);
  verb_tense = F_NULL;
  PNodeFindHeadVerb(w, &auxverb, &mainverb);
  /* Determine which case <x> must satisfy: nonfinite verbs assign
   * objective case to their subjects; finite verbs assign subjective
   * case. Note the assignment to verb_tense inside the condition.
   */
  if (auxverb && auxverb->lexitem &&
      (verb_tense = FeatureGet(auxverb->lexitem->features, FT_TENSE)) &&
      (!StringIn(verb_tense, FS_FINITE_TENSE))) {
    /* Subjects of nonfinite verbs are in objective case.
     * See Chomsky (1982/1987, p. 207).
     */
    cas = N("obj");
  } else {
    cas = N("subj");
  }
  if (!XBarSatisfiesCaseFilter(x, NULL, cas, lang)) return(0.0);
  /* Detect the exact shape [X [Z [W [V ...]]]]: a subject which is
   * itself a bare verb (sentence over VP over a single verb).
   */
  if (x->pn1 && x->pn1->feature == F_S && x->pn2 == NULL &&
      x->pn1->pn1 && x->pn1->pn1->feature == F_VP && x->pn1->pn2 == NULL &&
      x->pn1->pn1->pn1 && x->pn1->pn1->pn1->feature == F_VERB &&
      x->pn1->pn1->pn2 == NULL) {
    /* The case of an infinitive subject. [X [Z [W [V <garder.fVy¸>]]]]
     * "Aimer"
     * This is indeed possible, but it is causing a lot of extra parses.
     * "Aimer quelqu'un" is allowed.
     */
    return(0.1); /* todoSCORE */
  }
  /* Subject-verb agreement check. With no analyzable auxiliary verb,
   * accept the attachment without an agreement judgment.
   */
  if (auxverb == NULL) return(1.0);
  if (auxverb->lexitem == NULL) return(1.0);
  if (F_IMPERATIVE == FeatureGet(auxverb->lexitem->features, FT_MOOD)) {
    /* Imperative with subject. */
    return(0.2); /* todoSCORE */
  }
  /* Without head-noun features, no agreement can be checked; accept. */
  if (!PNodeGetHeadNounFeatures(x, 0, &noun_gender, &noun_number,
                                &noun_person)) {
    return(1.0);
  }
  /* Conjoined NP subjects ([NP NP [CONJ ...]]): skip agreement. */
  if (x->pn1 && x->pn2 && x->pn1->feature == F_NP &&
      x->pn2->feature == F_NP &&
      x->pn2->pn1 && x->pn2->pn1->feature == F_CONJUNCTION) {
    return(1.0); /* todo: Real conjunction agreement rules? */
  }
#ifdef notdef
  /* Seems unnecessary in light of FeatureMatch below */
  if (F_NULL == noun_person && F_NULL == noun_number) {
    /* todoSCORE: This is too relaxed? In any case it allows parsing of
     * Où étais (sic) mon pied gauche ?
     * Où étaient mes... ?
     * When would noun_number be F_NULL?
     */
    return(0.6); /* todoSCORE */
  }
#endif
  if (F_NULL == noun_person) {
    /* Default unknown person to third person. This is necessary to rule
     * out "I am" where "I" = "isospin".
     */
    noun_person = F_THIRD_PERSON;
  }
  /* Accept when number and person both agree with the auxiliary. */
  if (FeatureMatch(noun_number,
                   FeatureGet(auxverb->lexitem->features, FT_NUMBER)) &&
      FeatureMatch(noun_person,
                   FeatureGet(auxverb->lexitem->features, FT_PERSON))) {
    return(1.0);
  }
  if (Syn_ParseIsNPVerbInversionVP(w, lang)) {
    /* Subject-verb inversion: "What color are elephants?" */
    return(1.0);
  }
  /* Agreement failed: reject. */
  return(0.0);
}
/* todo: Something compatible with the dialect is chosen.
 * Alter this to PREFER dialect-specific choices?
 * <value_prop> can also just be value number.
 *
 * Select a lexical entry for <obj> compatible with the requested
 * features, subcategorization, paruniv, and discourse settings.
 * Candidates are marked in the file-scope <consider> array, then chosen
 * with a frequency preference (frequent > unmarked > infrequent) and a
 * least-recently-used discipline via p->lastused. Returns NULL when no
 * candidate qualifies.
 *
 * NOTE(review): <value_prop> is not read in this body — presumably used
 * by callers or a sibling overload; confirm before removing.
 */
ObjToLexEntry *ObjToLexEntryGet3(Obj *obj, Obj *value_prop, char *features,
                                 char *not_usagefeat, int subcat, int paruniv,
                                 int *theta_filled, int pass_two,
                                 Discourse *dc)
{
  int i, pos, address;
  char features1[FEATLEN];
  ObjToLexEntry *ole, *p;
  ole = obj->ole;
  address = DiscourseAddress(dc);
  /* In question-word-question mode, strip the noun feature so that
   * question words and copulas are not generated as nouns.
   */
  if (dc->ga.qwq && (ISA(N("question-word"), obj) ||
                     ISA(N("copula"), obj))) {
    StringCpy(features1, features, FEATLEN);
    StringElimChar(features1, F_NOUN);
    features = features1;
  }
  /* Pass 1: mark each candidate in consider[] that satisfies every
   * filter (language, features, paruniv, subcat, connotation, dialect,
   * style, task, address, excluded usage features, theta roles).
   */
  for (i = 0, p = ole; p; i++, p = p->next) {
    if (i >= MAXOLELEN) {
      Dbg(DBGGENER, DBGBAD, "increase MAXOLELEN");
      break;
    }
    pos = FeatureGet(p->le->features, FT_POS);
    /* todo: For super debugging, if we return NULL from this function,
     * print out list of why each lexical entry failed.
     */
    consider[i] = ((DC(dc).lang ==
                    FeatureGetRequired("ObjToLexEntryGet",
                                       p->le->features, FT_LANG)) &&
                   StringAnyIn(features, p->le->features) &&
                   paruniv == FeatureGet(p->features, FT_PARUNIV) &&
                   (subcat == F_NULL ||
                    subcat == ThetaRoleGetAnySubcat(p->theta_roles)) &&
                   F_NULL == FeatureGet(p->features, FT_CONNOTE) /* todo */ &&
                   FeatureDialectMatch(DC(dc).dialect,
                                       FeatureGet(p->features, FT_DIALECT)) &&
                   DC(dc).style == FeatureGet(p->features, FT_STYLE) &&
                   FeatureTaskOK(p->features, dc->task) &&
                   FeatureMatch(address,
                                FeatureGet(p->features, FT_ADDRESS)) &&
                   (not_usagefeat == NULL ||
                    (!StringAnyIn(not_usagefeat, p->features))) &&
                   ((pos != F_NOUN) || ISA(N("relation"), obj) ||
                    ThetaRoleMatch(theta_filled, p->theta_roles,
                                   pass_two)));
    Nop(); /* Debugging anchor. */
  }
  /* Pass 2 (variety mode): prefer candidates not yet used in this
   * discourse (lastused == UNIXTSNA), trying frequent entries first,
   * then unmarked, then (if allowed) infrequent.
   */
  if (!dc->ga.consistent) {
    for (i = 0, p = ole; p && i < MAXOLELEN; i++, p = p->next) {
      /* NOTE(review): only this occurrence casts FeatureGet through
       * (uc); the later F_FREQUENT comparisons do not — confirm whether
       * the cast is needed everywhere or nowhere.
       */
      if (consider[i] &&
          F_FREQUENT == ((uc)FeatureGet(p->features, FT_FREQ)) &&
          p->lastused == UNIXTSNA) {
        p->lastused = time(NULL);
        return(p);
      }
    }
    for (i = 0, p = ole; p && i < MAXOLELEN; i++, p = p->next) {
      if (consider[i] &&
          F_NULL == FeatureGet(p->features, FT_FREQ) &&
          p->lastused == UNIXTSNA) {
        p->lastused = time(NULL);
        return(p);
      }
    }
    if (DC(dc).infrequent_ok) {
      for (i = 0, p = ole; p && i < MAXOLELEN; i++, p = p->next) {
        if (consider[i] &&
            F_INFREQUENT == FeatureGet(p->features, FT_FREQ) &&
            p->lastused == UNIXTSNA) {
          p->lastused = time(NULL);
          return(p);
        }
      }
    }
  }
  if (pass_two) {
    /* In this case ObjToLexEntryClearLastused has already been done. */
    return(NULL);
  }
  /* All candidates were recently used: reset the lastused timestamps
   * and retry in the same frequency order.
   */
  ObjToLexEntryClearLastused(ole, dc);
  for (i = 0, p = ole; p && i < MAXOLELEN; i++, p = p->next) {
    if (consider[i] && F_FREQUENT == FeatureGet(p->features, FT_FREQ)) {
      p->lastused = time(NULL);
      return(p);
    }
  }
  for (i = 0, p = ole; p && i < MAXOLELEN; i++, p = p->next) {
    if (consider[i] && F_NULL == FeatureGet(p->features, FT_FREQ)) {
      p->lastused = time(NULL);
      return(p);
    }
  }
  /* Note in this case we do permit the generation of infrequent. */
  for (i = 0, p = ole; p && i < MAXOLELEN; i++, p = p->next) {
    if (consider[i] && F_INFREQUENT == FeatureGet(p->features, FT_FREQ)) {
      p->lastused = time(NULL);
      return(p);
    }
  }
  return(NULL);
}
/** Promote the current expander frame (mData->frame) to a key frame.
 *
 * Steps: (1) if the pose was estimated as essential after the region
 * already triangulated, start a new map region and re-base the frame;
 * (2) add measurements for the refined matches and triangulate those
 * marked ready; (3) create new 2D features from keypoints not covered
 * by existing features; (4) hand the frame to the region.
 *
 * @return non-owning pointer to the added key frame (ownership moves
 *         into mRegion).
 */
SlamKeyFrame *SlamMapExpander::addKeyFrame()
{
	ProfileSection s("addKeyFrame");

	mStatus = ESlamMapExpanderStatus::AddingFrame;

	DTSLAM_LOG << "---------------------\n"
		<< "Adding new key frame, ID=" << mData->frame->getTimestamp() << "!\n"
		<< "---------------------\n";

	/////////////////////////////////////////////
	//Check if we need to start a new region because of scale mismatch
	if (mRegion->getFirstTriangulationFrame() && mData->poseType == EPoseEstimationType::Essential)
	{
		DTSLAM_LOG << "\n-------------------------\nBeginning new region\n-------------------------\n";

		//Create new active region
		mRegion = mSlam->getMap().createRegion();

		//Duplicate reference key frame
		SlamKeyFrame *referenceFrameOld = mData->essentialReferenceFrame;
		mRegion->setPreviousRegionSourceFrame(referenceFrameOld);

		std::unique_ptr<SlamKeyFrame> referenceFrameNew = referenceFrameOld->copyWithoutFeatures();
		referenceFrameNew->setPose(std::unique_ptr<Pose3D>(new FullPose3D())); //Identity

		//Duplicate features: map each old-region feature to its clone in
		//the new region so refined matches can be re-pointed below.
		std::unordered_map<SlamFeature*, SlamFeature*> featureMap;
		mData->frame->getMeasurements().clear();
		for (auto &mPtr : referenceFrameOld->getMeasurements())
		{
			auto &m = *mPtr;
			SlamFeature *newFeature = mRegion->createFeature2D(*referenceFrameNew, m.getPositions()[0], m.getPositionXns()[0], m.getOctave());
			featureMap.insert(std::make_pair(&m.getFeature(), newFeature));
		}

		//Replace features in refined match (matches whose feature was not
		//duplicated are dropped).
		std::vector<FeatureMatch> newRefinedMatches;
		std::vector<bool> newRefinedMatchesInliers;
		for (int i = 0, end = mRefinedMatches.size(); i!=end; ++i)
		{
			auto &match = mRefinedMatches[i];
			auto &m = match.measurement;

			//Check if feature is in new region
			auto itFeatureMap = featureMap.find(&m.getFeature());
			if (itFeatureMap != featureMap.end())
			{
				//Feature in new region, copy
				newRefinedMatches.push_back(FeatureMatch(FeatureProjectionInfo(), NULL, SlamFeatureMeasurement(itFeatureMap->second, &m.getKeyFrame(), m.getPositions(), m.getPositionXns(), m.getOctave()), match.trackLength));
				newRefinedMatchesInliers.push_back(mRefinedMatchesInliers[i]);
			}
		}
		//NOTE(review): newRefinedMatchesInliers is filled but never moved
		//into mRefinedMatchesInliers, which now has stale indexing relative
		//to the filtered mRefinedMatches — confirm whether the member is
		//still read after this point.
		mRefinedMatches = std::move(newRefinedMatches);

		//Add reference frame for new region
		mRegion->addKeyFrame(std::move(referenceFrameNew));

		//Update active region
		mSlam->setActiveRegion(mRegion);

		//Transform essential pose to new reference frame (where referenceFrame is at the origin)
		Pose3D &oldEssentialPose = mData->frame->getPose();
		mData->frame->setPose(std::unique_ptr<Pose3D>(new FullPose3D(FullPose3D::MakeRelativePose(referenceFrameOld->getPose(), oldEssentialPose))));
		mData->essentialReferenceFrame = mRegion->getKeyFrames().front().get();

		//Re-evaluate triangulation readiness against the rebuilt matches.
		int newCount = checkRefinedMatchesForTriangulations();
		DTSLAM_LOG << "After checking again " << newCount << "/" << mRefinedMatches.size() << " matches ready for triangulation.\n";
	}

	/////////////////////////////////////////////
	//Add measurements to matched features
	int triangulateCount=0;
	for (int i = 0, end = mRefinedMatches.size(); i != end; ++i)
	{
		FeatureMatch &match = mRefinedMatches[i];
		SlamFeature &feature = match.measurement.getFeature();

		if (feature.getStatus() == SlamFeatureStatus::Invalid)
			continue; //Skip features that were deleted in the process

		//Add measurements: the frame keeps a non-owning pointer while the
		//feature owns the measurement.
		std::unique_ptr<SlamFeatureMeasurement> measurement(new SlamFeatureMeasurement(match.measurement));
		mData->frame->getMeasurements().push_back(measurement.get());
		feature.getMeasurements().push_back(std::move(measurement));

		//Triangulate
		if (mRefinedMatchesReadyForTriangulation[i] && !feature.is3D())
		{
			SlamFeatureMeasurement *m1;
			SlamFeatureMeasurement *m2;
			float angle;
			feature.getMeasurementsForTriangulation(m1,m2,angle);
			if(m1 && m2) // && angle > mMinTriangulationAngle
			{
				mRegion->convertTo3D(feature,*m1,*m2);
				triangulateCount++;
			}
		}
	}
	DTSLAM_LOG << "Features triangulated: " << triangulateCount << "\n";

	/////////////////////////////////////////////
	//Add new 2D features from keypoints in image cells not already
	//covered by existing (old) features.
	int newKeyPointCount=0;
	cv::Size2i imageSize = mData->frame->getImage(0).size();
	for (int octave = 0; octave<mData->frame->getPyramid().getOctaveCount(); ++octave)
	{
		const int scale = 1<<octave;
		//FeatureGridIndexer<KeyPointData> keypoints = mData->frame->getKeyPoints(octave).applyNonMaximaSuppresion(scale*PatchWarper::kPatchRightSize);
		//auto &keypoints = mData->frame->getKeyPoints(octave);
		cv::Size2i tileSize(scale*FLAGS_FrameKeypointGridSize, scale*FLAGS_FrameKeypointGridSize);
		auto keypoints = FeatureGridIndexer<KeyPointData>::ApplyNonMaximaSuppresion(mData->frame->getKeyPoints(octave), imageSize, tileSize, scale*PatchWarper::kPatchRightSize);

		for (auto &keyPoint : keypoints)
		{
			int cellX = keyPoint.position.x / mFeatureCoverageMaskScale;
			int cellY = keyPoint.position.y / mFeatureCoverageMaskScale;
			if(mFeatureCoverageMask(cellY,cellX) != ECellCoveredByOld)
			{
				//Add new 2D feature
				mRegion->createFeature2D(*mData->frame, keyPoint.position, keyPoint.xn, octave);
				newKeyPointCount++;
			}
		}
	}
	DTSLAM_LOG << "New 2D features added: " << newKeyPointCount << "\n";

	/////////////////////////////////////////////
	//Add key frame: record the originating region, then transfer
	//ownership of the frame to the region.
	SlamKeyFrame *res = mData->frame.get();
	mData->frame->mOriginalRegionID = mRegion->getId();
	mRegion->addKeyFrame(std::move(mData->frame));

	mStatus = ESlamMapExpanderStatus::Inactive;
	return res;
}