Example #1
	void PartKeyFrames::GetKeyFrameForTime(float time, KeyFrame **prev, KeyFrame **next)
	{
		// go through all the keyframes, check time
		// (if 'time' is past the last keyframe, *next is left unchanged;
		// lastKeyFrame is tracked but never used)
		KeyFrame *lastKeyFrame = NULL;
		for (std::list<KeyFrame>::iterator i = keyFrames.begin(); i != keyFrames.end(); ++i)
		{
			KeyFrame *keyFrame = &(*i);
			if (time > keyFrame->GetTime())
			{
				*prev = keyFrame;
			}
			else
			{
				*next = keyFrame;
				return;
			}
			lastKeyFrame = keyFrame;
		}
	}
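A minimal sketch of how a caller might use the prev/next pair returned above for linear interpolation; GetValueForTime and GetValue() are hypothetical names, not part of the original class, and prev/next must be initialised by the caller since GetKeyFrameForTime does not always write both.

	// Hypothetical caller (sketch only): blend between the surrounding keyframes.
	float PartKeyFrames::GetValueForTime(float time)
	{
		KeyFrame *prev = NULL;
		KeyFrame *next = NULL;
		GetKeyFrameForTime(time, &prev, &next);

		if (prev && next && next->GetTime() > prev->GetTime())
		{
			// normalized position of 'time' between the two keyframes
			float t = (time - prev->GetTime()) / (next->GetTime() - prev->GetTime());
			return prev->GetValue() + t * (next->GetValue() - prev->GetValue());   // GetValue() is assumed
		}
		if (prev) return prev->GetValue();
		if (next) return next->GetValue();
		return 0.0f;
	}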
void
DSPasteKeysCommand::addOrRemoveKeyframe(bool add)
{
    for (std::list<boost::weak_ptr<DSKnob> >::const_iterator it = _dstKnobs.begin(); it != _dstKnobs.end(); ++it) {
        DSKnobPtr knobContext = it->lock();
        if (!knobContext) {
            continue;
        }
        for (std::size_t i = 0; i < _keys.size(); ++i) {

            int dim = knobContext->getDimension();
            KnobIPtr knob = knobContext->getInternalKnob();
            knob->beginChanges();

            double keyTime = _keys[i].key.getTime();
            double setTime = _pasteRelativeToRefTime ? keyTime - _keys[_refKeyindex].key.getTime() + _refTime : keyTime;

            if (add) {

                for (int j = 0; j < knob->getDimension(); ++j) {
                    if ( (dim == -1) || (j == dim) ) {
                        KeyFrame k = _keys[i].key;
                        k.setTime(setTime);

                        knob->setKeyFrame(k, ViewSpec::all(), j, eValueChangedReasonNatronGuiEdited);
                    }
                }
            } else {
                for (int j = 0; j < knob->getDimension(); ++j) {
                    if ( (dim == -1) || (j == dim) ) {
                        knob->deleteValueAtTime(eCurveChangeReasonDopeSheet, setTime, ViewSpec::all(), j, i == 0);
                    }
                }
            }
            
            knob->endChanges();
        }
    }


    _model->refreshSelectionBboxAndRedrawView();
} // DSPasteKeysCommand::addOrRemoveKeyframe
Example #3
Natron::StatusEnum
Parametric_Knob::getNthControlPoint(int dimension,
                                    int nthCtl,
                                    double *key,
                                    double *value)
{
    ///Mt-safe as Curve is MT-safe
    if ( dimension >= (int)_curves.size() ) {
        return eStatusFailed;
    }
    KeyFrame kf;
    bool ret = _curves[dimension]->getKeyFrameWithIndex(nthCtl, &kf);
    if (!ret) {
        return eStatusFailed;
    }
    *key = kf.getTime();
    *value = kf.getValue();
    
    return eStatusOK;
}
Example #4
bool KeyFrame::merge(const KeyFrame &o)
{
	if(getMeshes().size() == 0)
	{
		meshes = o.getMeshes();
	}
	else
	{
		if(getMeshes().size() != o.getMeshes().size())
			return false;

		for(vector<Mesh*>::const_iterator iter=o.getMeshes().begin();
            iter != o.getMeshes().end();
            ++iter)
		{
			meshes.push_back(*iter);
		}
	}

	return true;
}
void AnimationToJson::fromJsonObject(KeyFrame &keyFrame, const QJsonObject &object, const QString &fileName)
{
    keyFrame.setFileName(QFileInfo(fileName).dir().absolutePath() + "/" + object.find("fileName").value().toString());
    keyFrame.setOffset(pointFromJsonObject(object.find("offset").value().toObject()));
    keyFrame.setRect(rectFromJsonObject(object.find("rect").value().toObject()));

    QJsonObject customPropertiesObject = object.find("customProperties").value().toObject();
    for (auto it = customPropertiesObject.begin(); it != customPropertiesObject.end(); ++it) {
        keyFrame.setCustomProperty(it.key(), it.value().toDouble());
    }

    for (const QJsonValue &value : object.find("hitBoxes").value().toArray()) {
        auto hitBox = new HitBox();
        fromJsonObject(*hitBox, value.toObject());
        keyFrame.insertHitBox(keyFrame.hitBoxes().count(), hitBox);
    }
}
QJsonObject AnimationToJson::toJsonObject(const KeyFrame &keyFrame)
{
    QJsonObject object;
    object.insert("fileName", QFileInfo(keyFrame.fileName()).fileName());
    object.insert("rect", toJsonObject(keyFrame.rect()));
    object.insert("offset", toJsonObject(keyFrame.offset()));

    QJsonObject customPropertiesObject;
    const auto customProperties = keyFrame.customProperties();
    for(auto it = customProperties.begin(); it != customProperties.end(); ++it) {
        customPropertiesObject.insert(it.key(), it.value());
    }
    object.insert("customProperties", customPropertiesObject);

    QJsonArray hitBoxes;
    for (HitBox *hitBox : keyFrame.hitBoxes()) {
        hitBoxes.append(toJsonObject(*hitBox));
    }
    object.insert("hitBoxes", hitBoxes);
    return object;
}
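For context, a hedged round-trip sketch showing how a QJsonObject produced by toJsonObject could be written to and read back from disk with QJsonDocument; the helper names and file handling are illustrative, not part of AnimationToJson.

// Sketch only: persist/reload a keyframe JSON object with QJsonDocument.
#include <QFile>
#include <QJsonDocument>
#include <QJsonObject>

static bool saveKeyFrameJson(const QJsonObject &object, const QString &path)
{
    QFile file(path);
    if (!file.open(QIODevice::WriteOnly))
        return false;
    file.write(QJsonDocument(object).toJson(QJsonDocument::Indented));
    return true;
}

static QJsonObject loadKeyFrameJson(const QString &path)
{
    QFile file(path);
    if (!file.open(QIODevice::ReadOnly))
        return QJsonObject();
    return QJsonDocument::fromJson(file.readAll()).object();
}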
Example #7
 void MapPoint::UpdateNormalAndDepth()
 {
     map<KeyFrame*,size_t> observations;
     KeyFrame* pRefKF;
     cv::Mat Pos;
     {
         boost::mutex::scoped_lock lock1(mMutexFeatures);
         boost::mutex::scoped_lock lock2(mMutexPos);
         if(mbBad)
             return;
         observations=mObservations;
         pRefKF=mpRefKF;
         Pos = mWorldPos.clone();
     }
     
     cv::Mat normal = cv::Mat::zeros(3,1,CV_32F);
     int n=0;
     for(map<KeyFrame*,size_t>::iterator mit=observations.begin(), mend=observations.end(); mit!=mend; mit++)
     {
         KeyFrame* pKF = mit->first;
         cv::Mat Owi = pKF->GetCameraCenter();
         cv::Mat normali = mWorldPos - Owi;
         normal = normal + normali/cv::norm(normali);
         n++;
     } 
     
     cv::Mat PC = Pos - pRefKF->GetCameraCenter();
     const float dist = cv::norm(PC);
     const int level = pRefKF->GetKeyPointScaleLevel(observations[pRefKF]);
     const float scaleFactor = pRefKF->GetScaleFactor();
     const float levelScaleFactor =  pRefKF->GetScaleFactor(level);
     const int nLevels = pRefKF->GetScaleLevels();
     
     {
         boost::mutex::scoped_lock lock3(mMutexPos);
         mfMinDistance = (1.0f/scaleFactor)*dist / levelScaleFactor;
         mfMaxDistance = scaleFactor*dist * pRefKF->GetScaleFactor(nLevels-1-level);
         mNormalVector = normal/n;
     }
 }
 void LocalMapping::KeyFrameCulling()
 {
     // Check redundant keyframes (only local keyframes)
     // A keyframe is considered redundant if 90% of the MapPoints it sees are seen
     // by at least 3 other keyframes (at the same or finer scale)
     vector<KeyFrame*> vpLocalKeyFrames = mpCurrentKeyFrame->GetVectorCovisibleKeyFrames();
     
     for(vector<KeyFrame*>::iterator vit=vpLocalKeyFrames.begin(), vend=vpLocalKeyFrames.end(); vit!=vend; vit++)
     {
         KeyFrame* pKF = *vit;
         if(pKF->mnId==0)
             continue;
         vector<MapPoint*> vpMapPoints = pKF->GetMapPointMatches();
         
         int nRedundantObservations=0;
         int nMPs=0;
         for(size_t i=0, iend=vpMapPoints.size(); i<iend; i++)
         {
             MapPoint* pMP = vpMapPoints[i];
             if(pMP)
             {
                 if(!pMP->isBad())
                 {
                     nMPs++;
                     if(pMP->Observations()>3)
                     {
                         int scaleLevel = pKF->GetKeyPointUn(i).octave;
                         map<KeyFrame*, size_t> observations = pMP->GetObservations();
                         int nObs=0;
                         for(map<KeyFrame*, size_t>::iterator mit=observations.begin(), mend=observations.end(); mit!=mend; mit++)
                         {
                             KeyFrame* pKFi = mit->first;
                             if(pKFi==pKF)
                                 continue;
                             int scaleLeveli = pKFi->GetKeyPointUn(mit->second).octave;
                             if(scaleLeveli<=scaleLevel+1)
                             {
                                 nObs++;
                                 if(nObs>=3)
                                     break;
                             }
                         }
                         if(nObs>=3)
                         {
                             nRedundantObservations++;
                         }
                     }
                 }
             }
         }
         
         if(nRedundantObservations>0.9*nMPs)
             pKF->SetBadFlag();
     }
 }
Example #9
bool Curve::addKeyFrame(KeyFrame key)
{
    
    QWriteLocker l(&_imp->_lock);
    if (_imp->type == CurvePrivate::BOOL_CURVE || _imp->type == CurvePrivate::STRING_CURVE) {
        key.setInterpolation(Natron::KEYFRAME_CONSTANT);
    }
    
    
    std::pair<KeyFrameSet::iterator,bool> it = addKeyFrameNoUpdate(key);
    
    evaluateCurveChanged(KEYFRAME_CHANGED,it.first);
    l.unlock();
    ///This call must not be locked!
    if (_imp->owner) {
        _imp->owner->evaluateAnimationChange();
    }
    return it.second;
}
	bool AnimationTrack::getFrame(f32 timeMs, KeyFrame& frame) const
	{
		array_t<KeyFrame*>::const_iterator iterKeyframeBefore;
		array_t<KeyFrame*>::const_iterator iterKeyframeAfter;

		// get the key frame after the requested time
		iterKeyframeAfter = getUpperBound(timeMs);

		// check if the time is after the last key frame
		if(iterKeyframeAfter == m_keyFrames.end())
		{
			// return the last key frame
			--iterKeyframeAfter;

			frame = *(*iterKeyframeAfter);
								
			return true;
		}

		// check if the time is before the first key frame
		if(iterKeyframeAfter == m_keyFrames.begin())
		{
			// return the first key frame
			frame = *(*iterKeyframeAfter);			

			return true;
		}

		// get the key frame before the requested one
		iterKeyframeBefore = iterKeyframeAfter;
		--iterKeyframeBefore;

		// get the two key frames
		KeyFrame& keyframeBefore = *(*iterKeyframeBefore);
		KeyFrame& keyframeAfter = *(*iterKeyframeAfter);

		// blend between the two key frames
		// Note: use the interpolation type to compute the time blend percentage, use linear
		// interpolation for the values, and take care of rotation
		frame.interpolateValueFrom(keyframeBefore, keyframeAfter);
		
		return true;
	}
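The blend noted above usually starts from a normalized time factor between the two keyframes; a minimal sketch under the assumption that this KeyFrame type exposes a getTime() accessor (the helper name is illustrative).

	// Sketch only: normalized blend factor between two keyframes (accessor name assumed).
	inline f32 computeBlendFactor(f32 timeMs, const KeyFrame& before, const KeyFrame& after)
	{
		const f32 range = after.getTime() - before.getTime();
		if (range <= 0.0f)
			return 0.0f;                                // degenerate: identical timestamps
		return (timeMs - before.getTime()) / range;     // 0 at 'before', 1 at 'after'
	}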
Example #11
void
KnobGui::onSetKeyActionTriggered()
{
    QAction* action = qobject_cast<QAction*>( sender() );

    assert(action);
    int dim = action->data().toInt();
    KnobPtr knob = getKnob();

    assert( knob->getHolder()->getApp() );
    //get the current time on the global timeline
    SequenceTime time = knob->getHolder()->getApp()->getTimeLine()->currentFrame();
    AddKeysCommand::KeysToAddList toAdd;
    KnobGuiPtr thisShared = shared_from_this();
    for (int i = 0; i < knob->getDimension(); ++i) {
        if ( (dim == -1) || (i == dim) ) {
            std::list<boost::shared_ptr<CurveGui> > curves = getGui()->getCurveEditor()->findCurve(thisShared, i);
            for (std::list<boost::shared_ptr<CurveGui> >::iterator it = curves.begin(); it != curves.end(); ++it) {
                AddKeysCommand::KeyToAdd keyToAdd;
                KeyFrame kf;
                kf.setTime(time);
                Knob<int>* isInt = dynamic_cast<Knob<int>*>( knob.get() );
                Knob<bool>* isBool = dynamic_cast<Knob<bool>*>( knob.get() );
                AnimatingKnobStringHelper* isString = dynamic_cast<AnimatingKnobStringHelper*>( knob.get() );
                Knob<double>* isDouble = dynamic_cast<Knob<double>*>( knob.get() );

                if (isInt) {
                    kf.setValue( isInt->getValue(i) );
                } else if (isBool) {
                    kf.setValue( isBool->getValue(i) );
                } else if (isDouble) {
                    kf.setValue( isDouble->getValue(i) );
                } else if (isString) {
                    std::string v = isString->getValue(i);
                    double dv;
                    isString->stringToKeyFrameValue(time, ViewIdx(0), v, &dv);
                    kf.setValue(dv);
                }

                keyToAdd.keyframes.push_back(kf);
                keyToAdd.curveUI = *it;
                keyToAdd.knobUI = thisShared;
                keyToAdd.dimension = i;
                toAdd.push_back(keyToAdd);
            }
        }
    }
    pushUndoCommand( new AddKeysCommand(getGui()->getCurveEditor()->getCurveWidget(), toAdd) );
}
 bool LoopClosing::ComputeSim3()
 {
     // For each consistent loop candidate we try to compute a Sim3
     
     const int nInitialCandidates = mvpEnoughConsistentCandidates.size();
     
     // We compute first ORB matches for each candidate
     // If enough matches are found, we setup a Sim3Solver
     ORBmatcher matcher(0.75,true);
     
     vector<Sim3Solver*> vpSim3Solvers;
     vpSim3Solvers.resize(nInitialCandidates);
     
     vector<vector<MapPoint*> > vvpMapPointMatches;
     vvpMapPointMatches.resize(nInitialCandidates);
     
     vector<bool> vbDiscarded;
     vbDiscarded.resize(nInitialCandidates);
     
     int nCandidates=0; //candidates with enough matches
     
     for(int i=0; i<nInitialCandidates; i++)
     {
         KeyFrame* pKF = mvpEnoughConsistentCandidates[i];
         
         // prevent Local Mapping from erasing it while it is being processed in this thread
         pKF->SetNotErase();
         
         if(pKF->isBad())
         {
             vbDiscarded[i] = true;
             continue;
         }
         
         int nmatches = matcher.SearchByBoW(mpCurrentKF,pKF,vvpMapPointMatches[i]);
         
         if(nmatches<20)
         {
             vbDiscarded[i] = true;
             continue;
         }
         else
         {
             Sim3Solver* pSolver = new Sim3Solver(mpCurrentKF,pKF,vvpMapPointMatches[i]);
             pSolver->SetRansacParameters(0.99,20,300);
             vpSim3Solvers[i] = pSolver;
         }
         
         nCandidates++;
     }
     
     bool bMatch = false;
     
     // Alternate RANSAC iterations among the candidates
     // until one is successful or all fail
     while(nCandidates>0 && !bMatch)
     {
         for(int i=0; i<nInitialCandidates; i++)
         {
             if(vbDiscarded[i])
                 continue;
             
             KeyFrame* pKF = mvpEnoughConsistentCandidates[i];
             
             // Perform 5 Ransac Iterations
             vector<bool> vbInliers;
             int nInliers;
             bool bNoMore;
             
             Sim3Solver* pSolver = vpSim3Solvers[i];
             cv::Mat Scm  = pSolver->iterate(5,bNoMore,vbInliers,nInliers);
             
             // If RANSAC reaches max. iterations, discard the keyframe
             if(bNoMore)
             {
                 vbDiscarded[i]=true;
                 nCandidates--;
             }
             
             // If RANSAC returns a Sim3, perform a guided matching and optimize with all correspondences
             if(!Scm.empty())
             {
                 vector<MapPoint*> vpMapPointMatches(vvpMapPointMatches[i].size(), static_cast<MapPoint*>(NULL));
                 for(size_t j=0, jend=vbInliers.size(); j<jend; j++)
                 {
                     if(vbInliers[j])
                         vpMapPointMatches[j]=vvpMapPointMatches[i][j];
                 }
                 
                 cv::Mat R = pSolver->GetEstimatedRotation();
                 cv::Mat t = pSolver->GetEstimatedTranslation();
                 const float s = pSolver->GetEstimatedScale();
                 matcher.SearchBySim3(mpCurrentKF,pKF,vpMapPointMatches,s,R,t,7.5);
                 
                 
                 g2o::Sim3 gScm(Converter::toMatrix3d(R),Converter::toVector3d(t),s);
                 const int nInliers = Optimizer::OptimizeSim3(mpCurrentKF, pKF, vpMapPointMatches, gScm, 10);
                 
                 // If optimization is successful, stop the RANSAC loop and continue
                 if(nInliers>=20)
                 {
                     bMatch = true;
                     mpMatchedKF = pKF;
                     g2o::Sim3 gSmw(Converter::toMatrix3d(pKF->GetRotation()),Converter::toVector3d(pKF->GetTranslation()),1.0);
                     mg2oScw = gScm*gSmw;
                     mScw = Converter::toCvMat(mg2oScw);
                     
                     mvpCurrentMatchedPoints = vpMapPointMatches;
                     break;
                 }
             }
         }
     }
     
     if(!bMatch)
     {
         for(int i=0; i<nInitialCandidates; i++)
             mvpEnoughConsistentCandidates[i]->SetErase();
         mpCurrentKF->SetErase();
         return false;
     }
     
     // Retrieve MapPoints seen in Loop Keyframe and neighbors
     vector<KeyFrame*> vpLoopConnectedKFs = mpMatchedKF->GetVectorCovisibleKeyFrames();
     vpLoopConnectedKFs.push_back(mpMatchedKF);
     mvpLoopMapPoints.clear();
     for(vector<KeyFrame*>::iterator vit=vpLoopConnectedKFs.begin(); vit!=vpLoopConnectedKFs.end(); vit++)
     {
         KeyFrame* pKF = *vit;
         vector<MapPoint*> vpMapPoints = pKF->GetMapPointMatches();
         for(size_t i=0, iend=vpMapPoints.size(); i<iend; i++)
         {
             MapPoint* pMP = vpMapPoints[i];
             if(pMP)
             {
                 if(!pMP->isBad() && pMP->mnLoopPointForKF!=mpCurrentKF->mnId)
                 {
                     mvpLoopMapPoints.push_back(pMP);
                     pMP->mnLoopPointForKF=mpCurrentKF->mnId;
                 }
             }
         }
     }
     
     // Find more matches projecting with the computed Sim3
     matcher.SearchByProjection(mpCurrentKF, mScw, mvpLoopMapPoints, mvpCurrentMatchedPoints,10);
     
     // If enough matches, accept the loop
     int nTotalMatches = 0;
     for(size_t i=0; i<mvpCurrentMatchedPoints.size(); i++)
     {
         if(mvpCurrentMatchedPoints[i])
             nTotalMatches++;
     }
     
     if(nTotalMatches>=40)
     {
         for(int i=0; i<nInitialCandidates; i++)
             if(mvpEnoughConsistentCandidates[i]!=mpMatchedKF)
                 mvpEnoughConsistentCandidates[i]->SetErase();
         return true;
     }
     else
     {
         for(int i=0; i<nInitialCandidates; i++)
             mvpEnoughConsistentCandidates[i]->SetErase();
         mpCurrentKF->SetErase();
         return false;
     }
     
 }
Example #13
// Finds nMaxNum closest KeyFrames, within a given distance, within a given region
std::vector<KeyFrame*> MapMakerBase::ClosestKeyFramesWithinDist(KeyFrame& kf, double dThreshDist, unsigned nMaxNum,
    KeyFrameRegion region)
{
  std::vector<KeyFrame*> vResult;

  std::vector<std::pair<double, KeyFrame*>> vpDistsAndKeyFrames;
  MultiKeyFrame& parent = *kf.mpParent;

  if (region == KF_ONLY_SELF)  // Only search through parent's keyframes
  {
    for (KeyFramePtrMap::iterator it = parent.mmpKeyFrames.begin(); it != parent.mmpKeyFrames.end(); it++)
    {
      KeyFrame& currentKF = *(it->second);
      if (&currentKF == &kf)
        continue;

      double dDist = kf.Distance(currentKF);
      if (dDist <= dThreshDist)
      {
        vpDistsAndKeyFrames.push_back(std::make_pair(dDist, &currentKF));
      }
    }
  }
  else  // Otherwise search all keyframes in the map
  {
    for (MultiKeyFramePtrList::iterator it = mMap.mlpMultiKeyFrames.begin(); it != mMap.mlpMultiKeyFrames.end(); ++it)
    {
      MultiKeyFrame& mkf = *(*it);

      if (&mkf == &parent && region == KF_ONLY_OTHER)
        continue;

      for (KeyFramePtrMap::iterator jit = mkf.mmpKeyFrames.begin(); jit != mkf.mmpKeyFrames.end(); ++jit)
      {
        KeyFrame& currentKF = *(jit->second);
        if (&currentKF == &kf)
          continue;

        double dDist = kf.Distance(currentKF);
        if (dDist <= dThreshDist)
        {
          vpDistsAndKeyFrames.push_back(std::make_pair(dDist, &currentKF));
        }
      }
    }
  }

  if (!vpDistsAndKeyFrames.empty())
  {
    if (nMaxNum > vpDistsAndKeyFrames.size())  // if we expect too many neighbors
      nMaxNum = vpDistsAndKeyFrames.size();    // reduce number that will be returned

    // Sort the first nMaxNum entries by distance (std::pair sorts on its first element)
    std::partial_sort(vpDistsAndKeyFrames.begin(), vpDistsAndKeyFrames.begin() + nMaxNum, vpDistsAndKeyFrames.end());

    for (unsigned int i = 0; i < nMaxNum; i++)
      vResult.push_back(vpDistsAndKeyFrames[i].second);
  }

  return vResult;
}
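The std::partial_sort call above relies on std::pair's lexicographic operator<, so entries are ordered by their first element (the distance); a tiny standalone illustration of that pattern:

// Standalone illustration of the partial_sort-by-distance pattern used above.
#include <algorithm>
#include <cstdio>
#include <utility>
#include <vector>

int main()
{
  std::vector<std::pair<double, int>> v = {{3.0, 30}, {1.0, 10}, {2.0, 20}, {0.5, 5}};
  // Order only the two closest entries; the rest are left in unspecified order.
  std::partial_sort(v.begin(), v.begin() + 2, v.end());
  std::printf("%d %d\n", v[0].second, v[1].second);  // prints: 5 10
  return 0;
}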
Example #14
void PluginSet::paste_keyframes(int64_t start, 
	int64_t length, 
	FileXML *file, 
	int default_only,
	int active_only)
{
	int result = 0;
	int first_keyframe = 1;
	Plugin *current;


	while(!result)
	{
		result = file->read_tag();

		if(!result)
		{
			if(file->tag.title_is("/PLUGINSET"))
				result = 1;
			else
			if(file->tag.title_is("KEYFRAME"))
			{
				int64_t position = file->tag.get_property("POSITION", 0);
				if(first_keyframe && default_only)
				{
					position = start;
				}
				else
				{
					position += start;
				}

// Get plugin owning keyframe
				for(current = (Plugin*)last; 
					current;
					current = (Plugin*)PREVIOUS)
				{
// We want keyframes to exist beyond the end of the last plugin to
// make editing intuitive, but this means it will always be possible to
// paste keyframes from one plugin into an incompatible plugin.
					if(position >= current->startproject)
					{
						KeyFrame *keyframe = 0;
						if(file->tag.get_property("DEFAULT", 0) || default_only)
						{
							keyframe = (KeyFrame*)current->keyframes->default_auto;
						}
						else
						if(!default_only)
						{
							keyframe = 
								(KeyFrame*)current->keyframes->insert_auto(position);
						}

						if(keyframe)
						{
							keyframe->load(file);
							keyframe->position = position;
						}
						break;
					}
				}

				first_keyframe = 0;
			}
		}
	}
}
Example #15
 bool operator()(const KeyFrame & f)
 {
     return f.getTime() == _t;
 }
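A predicate like this is typically passed to std::find_if to locate a keyframe by time; a hedged usage sketch in which the struct name, constructor, and the keyFrames container are assumptions:

// Sketch only: locating a keyframe by exact time with std::find_if (names assumed).
#include <algorithm>
#include <list>

struct KeyFrameTimePredicate
{
    explicit KeyFrameTimePredicate(double t) : _t(t) {}
    bool operator()(const KeyFrame & f) { return f.getTime() == _t; }
    double _t;
};

void findKeyFrameAt(std::list<KeyFrame>& keyFrames, double t)
{
    std::list<KeyFrame>::iterator found =
        std::find_if(keyFrames.begin(), keyFrames.end(), KeyFrameTimePredicate(t));
    if (found != keyFrames.end()) {
        // a keyframe exists exactly at time t
    }
}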
 void LocalMapping::SearchInNeighbors()
 {
     // Retrieve neighbor keyframes
     vector<KeyFrame*> vpNeighKFs = mpCurrentKeyFrame->GetBestCovisibilityKeyFrames(20);
     vector<KeyFrame*> vpTargetKFs;
     for(vector<KeyFrame*>::iterator vit=vpNeighKFs.begin(), vend=vpNeighKFs.end(); vit!=vend; vit++)
     {
         KeyFrame* pKFi = *vit;
         if(pKFi->isBad() || pKFi->mnFuseTargetForKF == mpCurrentKeyFrame->mnId)
             continue;
         vpTargetKFs.push_back(pKFi);
         pKFi->mnFuseTargetForKF = mpCurrentKeyFrame->mnId;
         
         // Extend to some second neighbors
         vector<KeyFrame*> vpSecondNeighKFs = pKFi->GetBestCovisibilityKeyFrames(5);
         for(vector<KeyFrame*>::iterator vit2=vpSecondNeighKFs.begin(), vend2=vpSecondNeighKFs.end(); vit2!=vend2; vit2++)
         {
             KeyFrame* pKFi2 = *vit2;
             if(pKFi2->isBad() || pKFi2->mnFuseTargetForKF==mpCurrentKeyFrame->mnId || pKFi2->mnId==mpCurrentKeyFrame->mnId)
                 continue;
             vpTargetKFs.push_back(pKFi2);
         }
     }
     
     
     // Search matches by projection from current KF in target KFs
     ORBmatcher matcher(0.6);
     vector<MapPoint*> vpMapPointMatches = mpCurrentKeyFrame->GetMapPointMatches();
     for(vector<KeyFrame*>::iterator vit=vpTargetKFs.begin(), vend=vpTargetKFs.end(); vit!=vend; vit++)
     {
         KeyFrame* pKFi = *vit;
         
         matcher.Fuse(pKFi,vpMapPointMatches);
     }
     
     // Search matches by projection from target KFs in current KF
     vector<MapPoint*> vpFuseCandidates;
     vpFuseCandidates.reserve(vpTargetKFs.size()*vpMapPointMatches.size());
     
     for(vector<KeyFrame*>::iterator vitKF=vpTargetKFs.begin(), vendKF=vpTargetKFs.end(); vitKF!=vendKF; vitKF++)
     {
         KeyFrame* pKFi = *vitKF;
         
         vector<MapPoint*> vpMapPointsKFi = pKFi->GetMapPointMatches();
         
         for(vector<MapPoint*>::iterator vitMP=vpMapPointsKFi.begin(), vendMP=vpMapPointsKFi.end(); vitMP!=vendMP; vitMP++)
         {
             MapPoint* pMP = *vitMP;
             if(!pMP)
                 continue;
             if(pMP->isBad() || pMP->mnFuseCandidateForKF == mpCurrentKeyFrame->mnId)
                 continue;
             pMP->mnFuseCandidateForKF = mpCurrentKeyFrame->mnId;
             vpFuseCandidates.push_back(pMP);
         }
     }
     
     matcher.Fuse(mpCurrentKeyFrame,vpFuseCandidates);
     
     
     // Update points
     vpMapPointMatches = mpCurrentKeyFrame->GetMapPointMatches();
     for(size_t i=0, iend=vpMapPointMatches.size(); i<iend; i++)
     {
         MapPoint* pMP=vpMapPointMatches[i];
         if(pMP)
         {
             if(!pMP->isBad())
             {
                 pMP->ComputeDistinctiveDescriptors();
                 pMP->UpdateNormalAndDepth();
             }
         }
     }
     
     // Update connections in covisibility graph
     mpCurrentKeyFrame->UpdateConnections();
 }
// _LayoutBackgroundSound
status_t
CollectingPlaylist::_LayoutBackgroundSound(const ServerObjectManager* library)
{
	// find the background sound clip, if we are supposed to have one
	Clip* soundClip = NULL;
	BString soundClipID = SoundClipID();

	if (soundClipID.Length() > 0) {
		soundClip = dynamic_cast<Clip*>(library->FindObject(
			soundClipID.String()));
		if (!soundClip) {
			print_error("CollectingPlaylist::_LayoutBackgroundSound() - "
				"didn't background sound clip: %s (ignoring)\n",
				soundClipID.String());
			return B_OK;
		}
	} else {
		// no background sound configured
		return B_OK;
	}

	float volume = Value(PROPERTY_BACKGROUND_SOUND_VOLUME, (float)1.0);

	uint64 duration = Duration();
	uint64 startFrame = 0;
	while (startFrame < duration) {
		ClipPlaylistItem* item = new (nothrow) ClipPlaylistItem(soundClip);
		if (!item) {
			print_error("CollectingPlaylist::_LayoutBackgroundSound() - "
				"no memory to create ClipPlaylistItem\n");
			return B_NO_MEMORY;
		}
	
		uint64 itemDuration = soundClip->Duration();
		uint64 maxItemDuration = duration - startFrame;

		if (startFrame == 0 && itemDuration >= maxItemDuration) {
			// one item as long as first track or longer
			// cut off
			itemDuration = maxItemDuration;
			// fade in + fade out
			PropertyAnimator* animator = item->AlphaAnimator();
			if (animator) {
				// remove all keyframes to get a clean start
				animator->MakeEmpty();
				KeyFrame* first = animator->InsertKeyFrameAt(0LL);
				KeyFrame* fadeEnd = animator->InsertKeyFrameAt(3);
				KeyFrame* fadeStart = animator->InsertKeyFrameAt(
					itemDuration - 4);
				KeyFrame* last = animator->InsertKeyFrameAt(itemDuration - 1);

				if (!first || !fadeEnd || !fadeStart || !last) {
					delete item;
					print_error("CollectingPlaylist::_LayoutBackgroundSound()"
						" - no memory to add fade keyframes\n");
					return B_NO_MEMORY;
				}

				first->SetScale(0.0);
				fadeEnd->SetScale(volume);
				fadeStart->SetScale(volume);
				last->SetScale(0.0);
			}
		} else if (startFrame == 0) {
			// first item, more to come
			// fade in
			PropertyAnimator* animator = item->AlphaAnimator();
			if (animator) {
				// remove all keyframes to get a clean start
				animator->MakeEmpty();
				KeyFrame* first = animator->InsertKeyFrameAt(0LL);
				KeyFrame* fadeEnd = animator->InsertKeyFrameAt(3);
				KeyFrame* last = animator->InsertKeyFrameAt(itemDuration - 1);

				if (!first || !fadeEnd || !last) {
					delete item;
					print_error("CollectingPlaylist::_LayoutBackgroundSound()"
						" - no memory to add fade keyframes\n");
					return B_NO_MEMORY;
				}

				first->SetScale(0.0);
				fadeEnd->SetScale(volume);
				last->SetScale(volume);
			}
		} else if (itemDuration >= maxItemDuration) {
			// last item
			// cut off
			itemDuration = maxItemDuration;
			// fade out
			PropertyAnimator* animator = item->AlphaAnimator();
			if (animator) {
				// remove all keyframes to get a clean start
				animator->MakeEmpty();
				KeyFrame* first = animator->InsertKeyFrameAt(0LL);
				KeyFrame* fadeStart = animator->InsertKeyFrameAt(
					itemDuration - 4);
				KeyFrame* last = animator->InsertKeyFrameAt(itemDuration - 1);

				if (!first || !fadeStart || !last) {
					delete item;
					print_error("CollectingPlaylist::_LayoutBackgroundSound()"
						" - no memory to add fade keyframes\n");
					return B_NO_MEMORY;
				}

				first->SetScale(volume);
				fadeStart->SetScale(volume);
				last->SetScale(0.0);
			}
		} else {
			// any remaining item
			PropertyAnimator* animator = item->AlphaAnimator();
			if (animator) {
				// remove all keyframes to get a clean start
				animator->MakeEmpty();
				KeyFrame* first = animator->InsertKeyFrameAt(0LL);

				if (!first) {
					delete item;
					print_error("CollectingPlaylist::_LayoutBackgroundSound()"
						" - no memory to add fade keyframes\n");
					return B_NO_MEMORY;
				}

				first->SetScale(volume);
			}
		}

		item->SetStartFrame(startFrame);
		item->SetDuration(itemDuration);
		item->SetTrack(1);

		if (!AddItem(item)) {
			delete item;
			print_error("CollectingPlaylist::_LayoutBackgroundSound() - "
				"no memory to add ClipPlaylistItem\n");
			return B_NO_MEMORY;
		}

		startFrame += itemDuration;
	}

	return B_OK;
}
Example #18
// Tries to make a new map point out of a single candidate point
// by searching for that point in another keyframe, and triangulating
// if a match is found.
bool MapMakerServerBase::AddPointEpipolar(KeyFrame& kfSrc, KeyFrame& kfTarget, int nLevel, int nCandidate)
{
  // debug
  static GVars3::gvar3<int> gvnCrossCamera("CrossCamera", 1, GVars3::HIDDEN | GVars3::SILENT);
  if (!*gvnCrossCamera && kfSrc.mCamName != kfTarget.mCamName)
    return false;

  TaylorCamera& cameraSrc = mmCameraModels[kfSrc.mCamName];
  TaylorCamera& cameraTarget = mmCameraModels[kfTarget.mCamName];

  int nLevelScale = LevelScale(nLevel);
  Candidate& candidate = kfSrc.maLevels[nLevel].vCandidates[nCandidate];
  CVD::ImageRef irLevelPos = candidate.irLevelPos;
  TooN::Vector<2> v2RootPos = LevelZeroPos(irLevelPos, nLevel);  // The pixel coords of the candidate at level zero

  TooN::Vector<3> v3Ray_SC =
    cameraSrc.UnProject(v2RootPos);  // The pixel coords unprojected into a 3d half-line in the source kf frame
  TooN::Vector<3> v3LineDirn_TC =
    kfTarget.mse3CamFromWorld.get_rotation() * (kfSrc.mse3CamFromWorld.get_rotation().inverse() *
        v3Ray_SC);  // The direction of that line in the target kf frame
  TooN::Vector<3> v3CamCenter_TC =
    kfTarget.mse3CamFromWorld *
    kfSrc.mse3CamFromWorld.inverse().get_translation();  // The position of the source kf in the target kf frame
  TooN::Vector<3> v3CamCenter_SC =
    kfSrc.mse3CamFromWorld *
    kfTarget.mse3CamFromWorld.inverse().get_translation();  // The position of the target kf in the source kf frame

  double dMaxEpiAngle = M_PI / 3;  // the maximum angle spanned by two view rays allowed
  double dMinEpiAngle = 0.05;      // the minimum angle allowed

  // Want to figure out the min and max depths allowed on the source ray, which will be determined by the minimum and
  // maximum allowed epipolar angle
  // See diagram below, which shows the min and max epipolar angles.
  /*
   *              /\
   *             / m\
   *            /  i \
   *           /`. n  \
   *          / m `.   \
   *         /  a   `.  \
   *        /   x     `. \
   *       /____________`.\
   *    Source           Target
   */

  double dSeparationDist = norm(v3CamCenter_SC);
  double dSourceAngle =
    acos((v3CamCenter_SC * v3Ray_SC) / dSeparationDist);  // v3Ray_SC is unit length so don't have to divide

  double dMinTargetAngle = M_PI - dSourceAngle - dMaxEpiAngle;
  double dStartDepth = dSeparationDist * sin(dMinTargetAngle) / sin(dMaxEpiAngle);

  double dMaxTargetAngle = M_PI - dSourceAngle - dMinEpiAngle;
  double dEndDepth = dSeparationDist * sin(dMaxTargetAngle) / sin(dMinEpiAngle);

  if (dStartDepth < 0.2)  // don't bother looking too close
    dStartDepth = 0.2;

  ROS_DEBUG_STREAM("dStartDepth: " << dStartDepth << " dEndDepth: " << dEndDepth);

  TooN::Vector<3> v3RayStart_TC =
    v3CamCenter_TC + dStartDepth * v3LineDirn_TC;  // The start of the epipolar line segment in the target kf frame
  TooN::Vector<3> v3RayEnd_TC =
    v3CamCenter_TC + dEndDepth * v3LineDirn_TC;  // The end of the epipolar line segment in the target kf frame

  // Project epipolar line segment start and end points onto unit sphere and check for minimum distance between them
  TooN::Vector<3> v3A = v3RayStart_TC;
  normalize(v3A);
  TooN::Vector<3> v3B = v3RayEnd_TC;
  normalize(v3B);
  TooN::Vector<3> v3BetweenEndpoints = v3A - v3B;

  if (v3BetweenEndpoints * v3BetweenEndpoints < 0.00000001)
  {
    ROS_DEBUG_STREAM("MapMakerServerBase: v3BetweenEndpoints too small.");
    return false;
  }

  // Now we want to construct a bunch of hypothetical point locations, so we can warp the source patch
  // into the target KF and look for a match. To do this, need to partition the epipolar arc in the target
  // KF equally, rather than the source ray equally. The epipolar arc lies at the intersection of the epipolar
  // plane and the unit circle of the target KF. We will construct a matrix that projects 3-vectors onto
  // the epipolar plane, and use it to define the start and stop coordinates of a unit circle by
  // projecting the ray start and ray end vectors. Then it's just a matter of partitioning the unit circle, and
  // projecting each partition point onto the source ray (keeping in mind that the source ray is in the
  // epipolar plane too).

  // Find the normal of the epipolar plane
  TooN::Vector<3> v3PlaneNormal = v3A ^ v3B;
  normalize(v3PlaneNormal);
  TooN::Vector<3> v3PlaneI =
    v3A;  // Lets call the vector we got from the start of the epipolar line segment the new coordinate frame's x axis
  TooN::Vector<3> v3PlaneJ = v3PlaneNormal ^ v3PlaneI;  // Get the y axis

  // This will convert a 3D point to the epipolar plane's coordinate frame
  TooN::Matrix<3> m3ToPlaneCoords;
  m3ToPlaneCoords[0] = v3PlaneI;
  m3ToPlaneCoords[1] = v3PlaneJ;
  m3ToPlaneCoords[2] = v3PlaneNormal;

  TooN::Vector<2> v2PlaneB = (m3ToPlaneCoords * v3B).slice<0, 2>();  // The vector we got from the end of the epipolar
  // line segment, in epipolar plane coordinates
  TooN::Vector<2> v2PlaneI = TooN::makeVector(1, 0);

  double dMaxAngleAlongCircle =
    acos(v2PlaneB * v2PlaneI);  // The angle between point B (now a 2D point in the plane) and the x axis

  // For stepping along the circle
  double dAngleStep = cameraTarget.OnePixelAngle() * LevelScale(nLevel) * 3;
  int nSteps = ceil(dMaxAngleAlongCircle / dAngleStep);
  dAngleStep = dMaxAngleAlongCircle / nSteps;

  TooN::Vector<2> v2RayStartInPlane = (m3ToPlaneCoords * v3RayStart_TC).slice<0, 2>();
  TooN::Vector<2> v2RayEndInPlane = (m3ToPlaneCoords * v3RayEnd_TC).slice<0, 2>();
  TooN::Vector<2> v2RayDirInPlane = v2RayEndInPlane - v2RayStartInPlane;
  normalize(v2RayDirInPlane);

  // First in world frame, second in camera frame
  std::vector<std::pair<TooN::Vector<3>, TooN::Vector<3>>> vMapPointPositions;
  TooN::SE3<> se3WorldFromTargetCam = kfTarget.mse3CamFromWorld.inverse();
  for (int i = 0; i < nSteps + 1; ++i)  // stepping along circle
  {
    double dAngle = i * dAngleStep;                                  // current angle
    TooN::Vector<2> v2CirclePoint = TooN::makeVector(cos(dAngle), sin(dAngle));  // point on circle

    // Backproject onto view ray, this is the depth along view ray where we intersect
    double dAlpha = (v2RayStartInPlane[0] * v2CirclePoint[1] - v2RayStartInPlane[1] * v2CirclePoint[0]) /
                    (v2RayDirInPlane[1] * v2CirclePoint[0] - v2RayDirInPlane[0] * v2CirclePoint[1]);

    TooN::Vector<3> v3PointPos_TC = v3RayStart_TC + dAlpha * v3LineDirn_TC;
    TooN::Vector<3> v3PointPos = se3WorldFromTargetCam * v3PointPos_TC;
    vMapPointPositions.push_back(std::make_pair(v3PointPos, v3PointPos_TC));
  }

  // This will be the map point that we place at the different depths in order to generate warped patches
  MapPoint point;
  point.mpPatchSourceKF = &kfSrc;
  point.mnSourceLevel = nLevel;
  point.mv3Normal_NC = TooN::makeVector(0, 0, -1);
  point.mirCenter = irLevelPos;
  point.mv3Center_NC = cameraSrc.UnProject(v2RootPos);
  point.mv3OneRightFromCenter_NC = cameraSrc.UnProject(v2RootPos + vec(CVD::ImageRef(nLevelScale, 0)));
  point.mv3OneDownFromCenter_NC = cameraSrc.UnProject(v2RootPos + vec(CVD::ImageRef(0, nLevelScale)));

  normalize(point.mv3Center_NC);
  normalize(point.mv3OneRightFromCenter_NC);
  normalize(point.mv3OneDownFromCenter_NC);

  PatchFinder finder;
  int nMaxZMSSD = finder.mnMaxSSD + 1;
  int nBestZMSSD = nMaxZMSSD;
  int nBest = -1;
  TooN::Vector<2> v2BestMatch = TooN::Zeros;

  std::vector<std::tuple<int, int, TooN::Vector<2>>> vScoresIndicesBestMatches;

  for (unsigned i = 0; i < vMapPointPositions.size(); ++i)  // go through all our hypothesized map points
  {
    point.mv3WorldPos = vMapPointPositions[i].first;
    point.RefreshPixelVectors();

    TooN::Vector<2> v2Image = cameraTarget.Project(vMapPointPositions[i].second);

    if (cameraTarget.Invalid())
      continue;

    if (!kfTarget.maLevels[0].image.in_image(CVD::ir(v2Image)))
      continue;

    // Check if projected point is in a masked portion of the target keyframe
    if (kfTarget.maLevels[0].mask.totalsize() > 0 && kfTarget.maLevels[0].mask[CVD::ir(v2Image)] == 0)
      continue;

    TooN::Matrix<2> m2CamDerivs = cameraTarget.GetProjectionDerivs();

    int nSearchLevel = finder.CalcSearchLevelAndWarpMatrix(point, kfTarget.mse3CamFromWorld, m2CamDerivs);
    if (nSearchLevel == -1)
      continue;

    finder.MakeTemplateCoarseCont(point);

    if (finder.TemplateBad())
      continue;

    int nScore;
    bool bExhaustive =
      false;  // Should we do an exhaustive search of the target area? Should maybe make this into a param
    bool bFound = finder.FindPatchCoarse(CVD::ir(v2Image), kfTarget, 3, nScore, bExhaustive);

    if (!bFound)
      continue;

    vScoresIndicesBestMatches.push_back(std::make_tuple(nScore, i, finder.GetCoarsePosAsVector()));

    if (nScore < nBestZMSSD)
    {
      nBestZMSSD = nScore;
      nBest = i;
      v2BestMatch = finder.GetCoarsePosAsVector();
    }
  }

  if (nBest == -1)
  {
    ROS_DEBUG_STREAM("No match found.");
    return false;
  }

  std::sort(vScoresIndicesBestMatches.begin(), vScoresIndicesBestMatches.end(), compScores);

  // We want matches that are unambiguous, so if there are many good matches along the view ray,
  // we can't say for certain where the best one really is. Therefore, implement the following rule:
  // the best ZMSSD has to be 10% better than the nearest other, unless that nearest other is 1 index away
  // from the best

  int nResizeTo = 1;
  for (unsigned i = 1; i < vScoresIndicesBestMatches.size(); ++i)
  {
    if (std::get<0>(vScoresIndicesBestMatches[i]) > nBestZMSSD * 0.9)  // within 10% of best
      nResizeTo++;
  }

  // Too many high scoring points, since the best can be within 10% of at most two other points.
  // We can't be certain of what is best, get out of here
  if (nResizeTo > 3)
    return false;

  vScoresIndicesBestMatches.resize(nResizeTo);  // chop!

  // All the points left in vScoresIndicesBestMatches should be within 1 idx of best, otherwise our matches are ambiguous
  // Test index distance:
  for (unsigned i = 1; i < vScoresIndicesBestMatches.size(); ++i)
  {
    if (abs(std::get<1>(vScoresIndicesBestMatches[i]) - nBest) > 1)  // bad, index too far away, get out of here
      return false;
  }

  // Now all the points in vScoresIndicesBestMatches can be considered potential matches

  TooN::Vector<2> v2SubPixPos = TooN::makeVector(-1, -1);
  bool bGotGoodSubpix = false;
  for (unsigned i = 0; i < vScoresIndicesBestMatches.size(); ++i)  // go through all potential good matches
  {
    int nCurrBest = std::get<1>(vScoresIndicesBestMatches[i]);
    TooN::Vector<2> v2CurrBestMatch = std::get<2>(vScoresIndicesBestMatches[i]);

    point.mv3WorldPos = vMapPointPositions[nCurrBest].first;
    point.RefreshPixelVectors();

    cameraTarget.Project(vMapPointPositions[nCurrBest].second);
    TooN::Matrix<2> m2CamDerivs = cameraTarget.GetProjectionDerivs();

    finder.CalcSearchLevelAndWarpMatrix(point, kfTarget.mse3CamFromWorld, m2CamDerivs);
    finder.MakeTemplateCoarseCont(point);
    finder.SetSubPixPos(v2CurrBestMatch);

    // Try to get subpixel convergence
    bool bSubPixConverges = finder.IterateSubPixToConvergence(kfTarget, 10);

    if (!bSubPixConverges)
      continue;

    // First one to make it here wins. Keep in mind that vScoresIndicesBestMatches is ordered by
    // score, so we're trying the points in descending order of potential
    bGotGoodSubpix = true;
    v2SubPixPos = finder.GetSubPixPos();
    break;
  }

  // None of the candidates had subpix converge? Bad match...
  if (!bGotGoodSubpix)
    return false;

  // Now triangulate the 3d point...
  TooN::Vector<3> v3New;
  v3New = kfTarget.mse3CamFromWorld.inverse() *
          ReprojectPoint(kfSrc.mse3CamFromWorld * kfTarget.mse3CamFromWorld.inverse(), cameraSrc.UnProject(v2RootPos),
                         cameraTarget.UnProject(v2SubPixPos));

  MapPoint* pPointNew = new MapPoint;

  pPointNew->mv3WorldPos = v3New;

  // Patch source stuff:
  pPointNew->mpPatchSourceKF = &kfSrc;
  pPointNew->mnSourceLevel = nLevel;
  pPointNew->mv3Normal_NC = TooN::makeVector(0, 0, -1);
  pPointNew->mirCenter = irLevelPos;
  pPointNew->mv3Center_NC = cameraSrc.UnProject(v2RootPos);
  pPointNew->mv3OneRightFromCenter_NC = cameraSrc.UnProject(v2RootPos + vec(CVD::ImageRef(nLevelScale, 0)));
  pPointNew->mv3OneDownFromCenter_NC = cameraSrc.UnProject(v2RootPos + vec(CVD::ImageRef(0, nLevelScale)));

  normalize(pPointNew->mv3Center_NC);
  normalize(pPointNew->mv3OneDownFromCenter_NC);
  normalize(pPointNew->mv3OneRightFromCenter_NC);

  pPointNew->RefreshPixelVectors();

  Measurement* pMeasSrc = new Measurement;
  pMeasSrc->eSource = Measurement::SRC_ROOT;
  pMeasSrc->v2RootPos = v2RootPos;
  pMeasSrc->nLevel = nLevel;
  pMeasSrc->bSubPix = true;

  Measurement* pMeasTarget = new Measurement;
  *pMeasTarget = *pMeasSrc;  // copy data
  pMeasTarget->eSource = Measurement::SRC_EPIPOLAR;
  pMeasTarget->v2RootPos = v2SubPixPos;

  // Record map point and its measurement in the right places
  kfSrc.AddMeasurement(pPointNew, pMeasSrc);
  kfTarget.AddMeasurement(pPointNew, pMeasTarget);

  // kfSrc.mmpMeasurements[pPointNew] = pMeasSrc;
  // kfTarget.mmpMeasurements[pPointNew] = pMeasTarget;
  // pPointNew->mMMData.spMeasurementKFs.insert(&kfSrc);
  // pPointNew->mMMData.spMeasurementKFs.insert(&kfTarget);

  mMap.mlpPoints.push_back(pPointNew);
  mlpNewQueue.push_back(pPointNew);

  return true;
}
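The start/end depths computed above (dStartDepth, dEndDepth) follow from the law of sines in the triangle formed by the two camera centres and the hypothesised point; a small sketch of that relation, with an illustrative function name:

// Sketch only: law-of-sines relation behind dStartDepth/dEndDepth.
// In the triangle (source camera, target camera, point), the depth along the source ray
// satisfies  depth / sin(targetAngle) = separation / sin(epipolarAngle),
// where the epipolar angle is the angle at the point and
// targetAngle = pi - sourceAngle - epipolarAngle.
#include <cmath>

double DepthOnSourceRay(double dSeparationDist, double dSourceAngle, double dEpiAngle)
{
  double dTargetAngle = M_PI - dSourceAngle - dEpiAngle;
  return dSeparationDist * std::sin(dTargetAngle) / std::sin(dEpiAngle);
}
// DepthOnSourceRay(dSeparationDist, dSourceAngle, dMaxEpiAngle) reproduces dStartDepth,
// DepthOnSourceRay(dSeparationDist, dSourceAngle, dMinEpiAngle) reproduces dEndDepth.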
Example #19
int main(int argc, char** argv)
{
    VideoCapture cap(string(KAI_PATH).append("shield_vid.mp4"));
//    VideoCapture cap(string(KAI_PATH).append("indoor.mov"));
//    VideoCapture cap(string(MOHIT_PATH).append("indoor.avi"));
//    VideoCapture cap("/Users/MohitSridhar/Downloads/kitti_youtube.avi");
//    VideoCapture cap("/Users/MohitSridhar/Downloads/VID_20150530_120719.mp4");
    
    if (!cap.isOpened())
    {
        cout << "failed to open video file" << endl;
        return -1;
    }

    // Initialize visualizer and load initial map
    Ptr<VisualizerListener> visualizerListener = new VisualizerListener;
    InitializeVisualizer();
    RunVisualizationOnly();
    
    // Initialize SLAM
    Mat frame;
    Size size(640, 480);
    vslam::VSlam slam = vslam::VSlam();

    while (true) {
        cap >> frame;
        if (frame.empty()) {
            break;
        }
        
        resize(frame, frame, size);
        
        clock_t start = clock();
        slam.ProcessFrame(frame);
        clock_t end = clock();
        
        double processFrameDuration = (end - start) / (double) CLOCKS_PER_SEC;
        cout << "processFrameDuration: " << processFrameDuration << endl;
        
        if (waitKey(30) == 27) {
            break;
        }
        
        // Update visualizer
        visualizerListener->update(slam.GetKeyFrames(), slam.GetCameraRot().back(),
                                   slam.GetCameraPose().back());
        RunVisualizationOnly();
        
        // Draw translation and rotation information
        Augmentor augmentor;
        KeyFrame currKeyFrame = slam.GetCurrKeyFrame();
        Mat translationMatrix = currKeyFrame.GetTranslation();
        augmentor.DisplayTranslation(frame, translationMatrix);
        Mat rotationMatrix = currKeyFrame.GetRotation();
        augmentor.DisplayRotation(frame, rotationMatrix);
        
        // Draw keypoints
        KeypointArray keypoints = currKeyFrame.GetTrackedKeypoints();
        Mat trackedFeatures;
        Scalar kpColor = Scalar(255, 0, 0);
        drawKeypoints(frame, keypoints, trackedFeatures, kpColor);
        imshow("Tracked Features", trackedFeatures);
    }
    
    waitKey(0);

    WaitForVisualizationThread();
    return 0;
}
// ValidateItemLayout
void
SlideShowPlaylist::ValidateItemLayout()
{
	int64 duration = Value(PROPERTY_DURATION, (int64)0);
	if (duration == 0)
		return;

	int64 transitionDuration
		= Value(PROPERTY_TRANSITION_DURATION, (int64)0);
	// TODO: transition mode...

	int32 count = CountItems();

	BList managedItems;

	int64 minDuration = 0;
	int64 minItemDuration = transitionDuration * 3;
	int64 fixedItemsDuration = 0;
	int64 maxDuration = 0;
	int32 variableItemCount = 0;
	for (int32 i = 0; i < count; i++) {
		PlaylistItem* item = ItemAtFast(i);
		if (item->Track() > 1)
			// not a "managed" item
			continue;

		managedItems.AddItem(item);

		if (item->HasMaxDuration()) {
			int64 maxItemDuration = item->MaxDuration();
			minDuration += maxItemDuration;
			fixedItemsDuration += maxItemDuration;
			if (minItemDuration > maxItemDuration)
				minItemDuration = maxItemDuration;
		} else {
			minDuration += 3 * transitionDuration;
			variableItemCount++;
		}

		maxDuration += item->MaxDuration();
	}

	count = managedItems.CountItems();
	if (count == 0)
		return;

	if (duration < minDuration)
		duration = minDuration;
	if (duration > maxDuration)
		duration = maxDuration;

	// limit transition duration to 1/3 of the minimum item duration
	int64 maxTransitionDuration = minItemDuration / 3;

	if (transitionDuration > maxTransitionDuration)
		transitionDuration = maxTransitionDuration;

	int64 variableItemsDuration = duration - fixedItemsDuration
			+ transitionDuration * (count - variableItemCount);

	int64 startFrame = 0;
	int64 lastVariableStartFrame = 0;
	int32 variableItemIndex = 0;
	for (int32 i = 0; i < count; i++) {
		PlaylistItem* item = (PlaylistItem*)managedItems.ItemAtFast(i);
		// overlapping items
		item->SetClipOffset(0);
		item->SetTrack(i & 1);

		int64 nextStartFrame;
		if (item->HasMaxDuration()) {
			nextStartFrame = startFrame + item->MaxDuration()
								- transitionDuration;
		} else {
			variableItemIndex++;
			int64 nextVariableStartFrame = (variableItemsDuration - transitionDuration)
								* variableItemIndex / variableItemCount;
			nextStartFrame = startFrame + nextVariableStartFrame - lastVariableStartFrame;
			lastVariableStartFrame = nextVariableStartFrame;
		}
		item->SetStartFrame(startFrame);
		item->SetDuration(nextStartFrame - startFrame + transitionDuration);
		startFrame = nextStartFrame;

		// transition
		PropertyAnimator* animator = item->AlphaAnimator();
		if (!animator)
			continue;

		AutoNotificationSuspender _(animator);

		// remove all keyframes to get a clean start
		animator->MakeEmpty();
		KeyFrame* first = animator->InsertKeyFrameAt(0LL);
		KeyFrame* last = animator->InsertKeyFrameAt(item->Duration() - 1);

		if (!first || !last)
			continue;

		first->SetScale(1.0);
		last->SetScale(1.0);

		// transition in top items
		if (transitionDuration > 0 && !(i & 1)) {
			// item on first track, animated opacity property
			if (i > 0) {
				// fade in
				KeyFrame* key = animator->InsertKeyFrameAt(transitionDuration);
				key->SetScale(1.0);
				first->SetScale(0.0);
			}
			
			if (i < count - 1) {
				// fade out
				KeyFrame* key = animator->InsertKeyFrameAt(
									item->Duration() - 1 - transitionDuration);
				key->SetScale(1.0);
				last->SetScale(0.0);
			}
		}
	}
}
Example #21
	void Puppet::Save(Entity *entity)
	{
		// save to filename
		TiXmlDocument xmlDoc;

		/// TextureAtlas
		if (textureAtlas)
		{
			textureAtlas->Save(&xmlDoc);
		}

		/// Parts
		//TiXmlElement *xmlParts = xmlDoc.FirstChildElement("Parts");
		TiXmlElement xmlParts("Parts");
		SaveParts(&xmlParts, entity);
		xmlDoc.InsertEndChild(xmlParts);


		/// Animations
		TiXmlElement xmlAnimations("Animations");
		{
			/// Animation
			for (std::list<Animation>::iterator i = animations.begin(); i != animations.end(); ++i)
			{
				TiXmlElement xmlAnimation("Animation");

				Animation *animation = &(*i);
                
                XMLFileNode xmlFileNodeKeyFrameAnim(&xmlAnimation);
				animation->Save(&xmlFileNodeKeyFrameAnim);

				/// PartKeyFrames
				for (std::list<Part*>::iterator j = parts.begin(); j != parts.end(); ++j)
				{
					PartKeyFrames *partKeyFrames = animation->GetPartKeyFrames(*j);
					if (partKeyFrames)
					{
						TiXmlElement xmlPartKeyFrames("PartKeyFrames");
						XMLFileNode xmlFileNodePartKeyFrames(&xmlPartKeyFrames);

						partKeyFrames->Save(&xmlFileNodePartKeyFrames);

						/// KeyFrame
					
						std::list<KeyFrame> *keyFrames = partKeyFrames->GetKeyFrames();
						for (std::list<KeyFrame>::iterator i = keyFrames->begin(); i != keyFrames->end(); ++i)
						{
							KeyFrame *keyFrame = &(*i);

							TiXmlElement xmlKeyFrame("KeyFrame");
							XMLFileNode xmlFileNodeKeyFrame(&xmlKeyFrame);

							keyFrame->Save(&xmlFileNodeKeyFrame);

							xmlPartKeyFrames.InsertEndChild(xmlKeyFrame);
						}

						xmlAnimation.InsertEndChild(xmlPartKeyFrames);
					}
				}

				xmlAnimations.InsertEndChild(xmlAnimation);
			}
		}
		xmlDoc.InsertEndChild(xmlAnimations);

		xmlDoc.SaveFile(Assets::GetContentPath() + filename);
	}
Example #22
// Mapmaker's try-to-find-a-point-in-a-keyframe code. This is used to update
// data association if a bad measurement was detected, or if a point
// was never searched for in a keyframe in the first place. This operates
// much like the tracker! So most of the code looks just like in
// TrackerData.h.
bool MapMakerServerBase::ReFind_Common(KeyFrame& kf, MapPoint& point)
{
  // abort if either a measurement is already in the map, or we've
  // decided that this point-kf combo is beyond redemption
  if (point.mMMData.spMeasurementKFs.count(&kf) || point.mMMData.spNeverRetryKFs.count(&kf))
    return false;

  if (point.mbBad)
    return false;

  if (kf.mpParent->mbBad)
    return false;

  // debug
  static GVars3::gvar3<int> gvnCrossCamera("CrossCamera", 1, GVars3::HIDDEN | GVars3::SILENT);
  if (!*gvnCrossCamera && kf.mCamName != point.mpPatchSourceKF->mCamName)
    return false;

  static PatchFinder finder;
  TooN::Vector<3> v3Cam = kf.mse3CamFromWorld * point.mv3WorldPos;

  TaylorCamera& camera = mmCameraModels[kf.mCamName];
  TooN::Vector<2> v2Image = camera.Project(v3Cam);

  if (camera.Invalid())
  {
    point.mMMData.spNeverRetryKFs.insert(&kf);
    return false;
  }

  CVD::ImageRef irImageSize = kf.maLevels[0].image.size();
  if (v2Image[0] < 0 || v2Image[1] < 0 || v2Image[0] > irImageSize[0] || v2Image[1] > irImageSize[1])
  {
    point.mMMData.spNeverRetryKFs.insert(&kf);
    return false;
  }

  TooN::Matrix<2> m2CamDerivs = camera.GetProjectionDerivs();
  finder.MakeTemplateCoarse(point, kf.mse3CamFromWorld, m2CamDerivs);

  if (finder.TemplateBad())
  {
    point.mMMData.spNeverRetryKFs.insert(&kf);
    return false;
  }

  int nScore;
  bool bFound = finder.FindPatchCoarse(CVD::ir(v2Image), kf, 4, nScore);  // Very tight search radius!
  if (!bFound)
  {
    point.mMMData.spNeverRetryKFs.insert(&kf);
    return false;
  }

  // If we found something, generate a measurement struct and put it in the map
  Measurement* pMeas = new Measurement;
  pMeas->nLevel = finder.GetLevel();
  pMeas->eSource = Measurement::SRC_REFIND;

  if (finder.GetLevel() > 0)
  {
    finder.MakeSubPixTemplate();
    finder.SetSubPixPos(finder.GetCoarsePosAsVector());
    finder.IterateSubPixToConvergence(kf, 8);
    pMeas->v2RootPos = finder.GetSubPixPos();
    pMeas->bSubPix = true;
  }
  else
  {
    pMeas->v2RootPos = finder.GetCoarsePosAsVector();
    pMeas->bSubPix = false;
  }

  if (kf.mmpMeasurements.count(&point))
    ROS_BREAK();  // This should never happen, we checked for this at the start.

  kf.AddMeasurement(&point, pMeas);

  // kf.mmpMeasurements[&point] = pMeas;
  // point.mMMData.spMeasurementKFs.insert(&kf);

  return true;
}
 void LoopClosing::CorrectLoop()
 {
     // Send a stop signal to Local Mapping
     // to avoid new keyframes being inserted while the loop is corrected
     mpLocalMapper->RequestStop();
     
     // Wait until Local Mapping has effectively stopped
     //ros::Rate r(1e4);
     //while(ros::ok() && !mpLocalMapper->isStopped())
     while(!mpLocalMapper->isStopped())
     {
         //r.sleep();
         boost::this_thread::sleep(boost::posix_time::milliseconds(10000));
     }
     
     // Ensure current keyframe is updated
     mpCurrentKF->UpdateConnections();
     
     // Retrieve keyframes connected to the current keyframe and compute corrected Sim3 pose by propagation
     mvpCurrentConnectedKFs = mpCurrentKF->GetVectorCovisibleKeyFrames();
     mvpCurrentConnectedKFs.push_back(mpCurrentKF);
     
     KeyFrameAndPose CorrectedSim3, NonCorrectedSim3;
     CorrectedSim3[mpCurrentKF]=mg2oScw;
     cv::Mat Twc = mpCurrentKF->GetPoseInverse();
     
     
     for(vector<KeyFrame*>::iterator vit=mvpCurrentConnectedKFs.begin(), vend=mvpCurrentConnectedKFs.end(); vit!=vend; vit++)
     {
         KeyFrame* pKFi = *vit;
         
         cv::Mat Tiw = pKFi->GetPose();
         
         if(pKFi!=mpCurrentKF)
         {
             cv::Mat Tic = Tiw*Twc;
             cv::Mat Ric = Tic.rowRange(0,3).colRange(0,3);
             cv::Mat tic = Tic.rowRange(0,3).col(3);
             g2o::Sim3 g2oSic(Converter::toMatrix3d(Ric),Converter::toVector3d(tic),1.0);
             g2o::Sim3 g2oCorrectedSiw = g2oSic*mg2oScw;
             //Pose corrected with the Sim3 of the loop closure
             CorrectedSim3[pKFi]=g2oCorrectedSiw;
         }
         
         cv::Mat Riw = Tiw.rowRange(0,3).colRange(0,3);
         cv::Mat tiw = Tiw.rowRange(0,3).col(3);
         g2o::Sim3 g2oSiw(Converter::toMatrix3d(Riw),Converter::toVector3d(tiw),1.0);
         //Pose without correction
         NonCorrectedSim3[pKFi]=g2oSiw;
     }
     
     // Correct all MapPoints observed by current keyframe and neighbors, so that they align with the other side of the loop
     for(KeyFrameAndPose::iterator mit=CorrectedSim3.begin(), mend=CorrectedSim3.end(); mit!=mend; mit++)
     {
         KeyFrame* pKFi = mit->first;
         g2o::Sim3 g2oCorrectedSiw = mit->second;
         g2o::Sim3 g2oCorrectedSwi = g2oCorrectedSiw.inverse();
         
         g2o::Sim3 g2oSiw =NonCorrectedSim3[pKFi];
         
         vector<MapPoint*> vpMPsi = pKFi->GetMapPointMatches();
         for(size_t iMP=0, endMPi = vpMPsi.size(); iMP<endMPi; iMP++)
         {
             MapPoint* pMPi = vpMPsi[iMP];
             if(!pMPi)
                 continue;
             if(pMPi->isBad())
                 continue;
             if(pMPi->mnCorrectedByKF==mpCurrentKF->mnId)
                 continue;
             
             // Project with non-corrected pose and project back with corrected pose
             cv::Mat P3Dw = pMPi->GetWorldPos();
             Eigen::Matrix<double,3,1> eigP3Dw = Converter::toVector3d(P3Dw);
             Eigen::Matrix<double,3,1> eigCorrectedP3Dw = g2oCorrectedSwi.map(g2oSiw.map(eigP3Dw));
             
             cv::Mat cvCorrectedP3Dw = Converter::toCvMat(eigCorrectedP3Dw);
             pMPi->SetWorldPos(cvCorrectedP3Dw);
             pMPi->mnCorrectedByKF = mpCurrentKF->mnId;
             pMPi->mnCorrectedReference = pKFi->mnId;
             pMPi->UpdateNormalAndDepth();
         }
         
         // Update keyframe pose with corrected Sim3. First transform Sim3 to SE3 (scale translation)
         Eigen::Matrix3d eigR = g2oCorrectedSiw.rotation().toRotationMatrix();
         Eigen::Vector3d eigt = g2oCorrectedSiw.translation();
         double s = g2oCorrectedSiw.scale();
         
         eigt *=(1./s); //[R t/s;0 1]
         
         cv::Mat correctedTiw = Converter::toCvSE3(eigR,eigt);
         
         pKFi->SetPose(correctedTiw);
         
         // Make sure connections are updated
         pKFi->UpdateConnections();
     }
     
     // Start Loop Fusion
     // Update matched map points and replace if duplicated
     for(size_t i=0; i<mvpCurrentMatchedPoints.size(); i++)
     {
         if(mvpCurrentMatchedPoints[i])
         {
             MapPoint* pLoopMP = mvpCurrentMatchedPoints[i];
             MapPoint* pCurMP = mpCurrentKF->GetMapPoint(i);
             if(pCurMP)
                 pCurMP->Replace(pLoopMP);
             else
             {
                 mpCurrentKF->AddMapPoint(pLoopMP,i);
                 pLoopMP->AddObservation(mpCurrentKF,i);
                 pLoopMP->ComputeDistinctiveDescriptors();
             }
         }
     }
     
     // Project MapPoints observed in the neighborhood of the loop keyframe
     // into the current keyframe and neighbors using corrected poses.
     // Fuse duplications.
     SearchAndFuse(CorrectedSim3);
     
     
     // After the MapPoint fusion, new links in the covisibility graph will appear attaching both sides of the loop
     map<KeyFrame*, set<KeyFrame*> > LoopConnections;
     
     for(vector<KeyFrame*>::iterator vit=mvpCurrentConnectedKFs.begin(), vend=mvpCurrentConnectedKFs.end(); vit!=vend; vit++)
     {
         KeyFrame* pKFi = *vit;
         vector<KeyFrame*> vpPreviousNeighbors = pKFi->GetVectorCovisibleKeyFrames();
         
         // Update connections. Detect new links.
         pKFi->UpdateConnections();
         LoopConnections[pKFi]=pKFi->GetConnectedKeyFrames();
         for(vector<KeyFrame*>::iterator vit_prev=vpPreviousNeighbors.begin(), vend_prev=vpPreviousNeighbors.end(); vit_prev!=vend_prev; vit_prev++)
         {
             LoopConnections[pKFi].erase(*vit_prev);
         }
         for(vector<KeyFrame*>::iterator vit2=mvpCurrentConnectedKFs.begin(), vend2=mvpCurrentConnectedKFs.end(); vit2!=vend2; vit2++)
         {
             LoopConnections[pKFi].erase(*vit2);
         }
     }
     
     mpTracker->ForceRelocalisation();
     
     Optimizer::OptimizeEssentialGraph(mpMap, mpMatchedKF, mpCurrentKF,  mg2oScw, NonCorrectedSim3, CorrectedSim3, LoopConnections);
     
     //Add edge
     mpMatchedKF->AddLoopEdge(mpCurrentKF);
     mpCurrentKF->AddLoopEdge(mpMatchedKF);
     
     std::cout << "Loop Closed!" << std::endl;
     
     // Loop closed. Release Local Mapping.
     mpLocalMapper->Release();
     
     mpMap->SetFlagAfterBA();
     
     mLastLoopKFid = mpCurrentKF->mnId;
 }
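
The loop above corrects each MapPoint by mapping it into the keyframe's camera frame with the old (drifted) pose and mapping it back to world space with the inverse of the corrected pose. Below is a minimal standalone sketch of that round trip using a toy similarity transform; SimpleSim3 and Vec3 are invented stand-ins for g2o::Sim3 and the cv/Eigen types, not ORB-SLAM code.

#include <cstdio>

struct Vec3 { double x, y, z; };

// Toy similarity transform: x' = s * R * x + t
struct SimpleSim3
{
    double R[3][3]; // rotation
    Vec3 t;         // translation
    double s;       // scale

    Vec3 map(const Vec3 &p) const
    {
        return { s*(R[0][0]*p.x + R[0][1]*p.y + R[0][2]*p.z) + t.x,
                 s*(R[1][0]*p.x + R[1][1]*p.y + R[1][2]*p.z) + t.y,
                 s*(R[2][0]*p.x + R[2][1]*p.y + R[2][2]*p.z) + t.z };
    }

    // Inverse similarity: scale 1/s, rotation R^T, translation -(1/s) * R^T * t
    SimpleSim3 inverse() const
    {
        SimpleSim3 inv;
        inv.s = 1.0 / s;
        for (int i = 0; i < 3; ++i)
            for (int j = 0; j < 3; ++j)
                inv.R[i][j] = R[j][i];
        Vec3 Rt = { R[0][0]*t.x + R[1][0]*t.y + R[2][0]*t.z,
                    R[0][1]*t.x + R[1][1]*t.y + R[2][1]*t.z,
                    R[0][2]*t.x + R[1][2]*t.y + R[2][2]*t.z };
        inv.t = { -inv.s * Rt.x, -inv.s * Rt.y, -inv.s * Rt.z };
        return inv;
    }
};

int main()
{
    // Non-corrected world-to-camera pose of a neighbor keyframe (identity rotation here)
    SimpleSim3 Siw = { {{1,0,0},{0,1,0},{0,0,1}}, {0,0,-2}, 1.0 };
    // Corrected pose: the loop closure revealed a scale drift
    SimpleSim3 correctedSiw = { {{1,0,0},{0,1,0},{0,0,1}}, {0,0,-2}, 1.1 };

    Vec3 worldPoint = {1.0, 0.5, 3.0};
    // Map with the non-corrected pose, un-map with the corrected one,
    // mirroring g2oCorrectedSwi.map(g2oSiw.map(eigP3Dw)) above.
    Vec3 corrected = correctedSiw.inverse().map(Siw.map(worldPoint));
    std::printf("corrected world point: %f %f %f\n", corrected.x, corrected.y, corrected.z);
    return 0;
}
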
Exemple #24
0
void
NodeAnimPrivate::computeGroupRange()
{

    NodeGuiPtr nodeUI = nodeGui.lock();
    if (!nodeUI) {
        // The GUI may already be gone; bail out rather than dereference a null pointer
        return;
    }
    NodePtr node = nodeUI->getNode();
    if (!node) {
        return;
    }

    AnimationModulePtr isAnimModel = toAnimationModule(model.lock());
    if (!isAnimModel) {
        return;
    }
    NodeGroupPtr nodegroup = node->isEffectNodeGroup();
    assert(nodegroup);
    if (!nodegroup) {
        return;
    }


    AnimationModuleTreeView* treeView = isAnimModel->getEditor()->getTreeView();

    NodesList nodes = nodegroup->getNodes();

    std::set<double> times;

    for (NodesList::const_iterator it = nodes.begin(); it != nodes.end(); ++it) {
        NodeAnimPtr childAnim = isAnimModel->findNodeAnim(*it);

        if (!childAnim) {
            continue;
        }

        if (!treeView->isItemVisibleRecursive(childAnim->getTreeItem())) {
            continue;
        }

        childAnim->refreshFrameRange();
        RangeD childRange = childAnim->getFrameRange();
        times.insert(childRange.min);
        times.insert(childRange.max);

        // Also check the child knobs keyframes
        NodeGuiPtr childGui = childAnim->getNodeGui();
        const KnobsVec &knobs = childGui->getNode()->getKnobs();

        for (KnobsVec::const_iterator it2 = knobs.begin(); it2 != knobs.end(); ++it2) {

            if ( !(*it2)->isAnimationEnabled() || !(*it2)->hasAnimation() ) {
                continue;
            } else {
                // For each dimension and for each split view get the first/last keyframe (if any)
                int nDims = (*it2)->getNDimensions();
                std::list<ViewIdx> views = (*it2)->getViewsList();
                for (std::list<ViewIdx>::const_iterator it3 = views.begin(); it3 != views.end(); ++it3) {
                    for (int i = 0; i < nDims; ++i) {
                        CurvePtr curve = (*it2)->getCurve(*it3, DimIdx(i));
                        if (!curve) {
                            continue;
                        }
                        int nKeys = curve->getKeyFramesCount();
                        if (nKeys > 0) {
                            KeyFrame k;
                            if (curve->getKeyFrameWithIndex(0, &k)) {
                                times.insert( k.getTime() );
                            }
                            if (curve->getKeyFrameWithIndex(nKeys - 1, &k)) {
                                times.insert( k.getTime() );
                            }
                        }
                    }
                }
            }
        } // for all knobs
    } // for all children nodes

    if (times.size() <= 1) {
        frameRange.min = 0;
        frameRange.max = 0;
    } else {
        frameRange.min = *times.begin();
        frameRange.max = *times.rbegin();
    }

} // computeGroupRange
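
computeGroupRange above reduces to collecting candidate frame times in an ordered std::set<double> and reading its extremes. Here is a minimal standalone sketch of that pattern; RangeD is redeclared locally as a stand-in for Natron's struct.

#include <set>
#include <cstdio>

struct RangeD { double min, max; };

static RangeD computeRange(const std::set<double> &times)
{
    RangeD range = {0, 0};
    if (times.size() > 1) {
        range.min = *times.begin();    // std::set is ordered, so begin() is the smallest time
        range.max = *times.rbegin();   // and rbegin() is the largest
    }
    return range;
}

int main()
{
    std::set<double> times = {12, 3, 47, 3, 20};    // duplicates collapse automatically
    RangeD r = computeRange(times);
    std::printf("range: [%g, %g]\n", r.min, r.max); // prints "range: [3, 47]"
    return 0;
}
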
 bool LoopClosing::DetectLoop()
 {
     {
         boost::mutex::scoped_lock lock(mMutexLoopQueue);
         mpCurrentKF = mlpLoopKeyFrameQueue.front();
         mlpLoopKeyFrameQueue.pop_front();
         // Avoid that a keyframe can be erased while it is being processed by this thread
         mpCurrentKF->SetNotErase();
     }
     
     // If the map contains fewer than 10 keyframes, or fewer than 10 keyframes have passed since the last loop closure, skip detection
     if(mpCurrentKF->mnId<mLastLoopKFid+10)
     {
         mpKeyFrameDB->add(mpCurrentKF);
         mpCurrentKF->SetErase();
         return false;
     }
     
     // Compute reference BoW similarity score
     // This is the lowest score to a connected keyframe in the covisibility graph
     // We will impose loop candidates to have a higher similarity than this
     vector<KeyFrame*> vpConnectedKeyFrames = mpCurrentKF->GetVectorCovisibleKeyFrames();
     DBoW2::BowVector CurrentBowVec = mpCurrentKF->GetBowVector();
     float minScore = 1;
     for(size_t i=0; i<vpConnectedKeyFrames.size(); i++)
     {
         KeyFrame* pKF = vpConnectedKeyFrames[i];
         if(pKF->isBad())
             continue;
         DBoW2::BowVector BowVec = pKF->GetBowVector();
         
         float score = mpORBVocabulary->score(CurrentBowVec, BowVec);
         
         if(score<minScore)
             minScore = score;
     }
     
     // Query the database imposing the minimum score
     vector<KeyFrame*> vpCandidateKFs = mpKeyFrameDB->DetectLoopCandidates(mpCurrentKF, minScore);
     
     
     // If there are no loop candidates, just add the new keyframe and return false
     if(vpCandidateKFs.empty())
     {
         mpKeyFrameDB->add(mpCurrentKF);
         mvConsistentGroups.clear();
         mpCurrentKF->SetErase();
         return false;
     }
     
     // For each loop candidate check consistency with previous loop candidates
     // Each candidate expands a covisibility group (keyframes connected to the loop candidate in the covisibility graph)
     // A group is consistent with a previous group if they share at least one keyframe
     // We must detect a consistent loop in several consecutive keyframes to accept it
     mvpEnoughConsistentCandidates.clear();
     
     vector<ConsistentGroup> vCurrentConsistentGroups;
     vector<bool> vbConsistentGroup(mvConsistentGroups.size(),false);
     for(size_t i=0, iend=vpCandidateKFs.size(); i<iend; i++)
     {
         KeyFrame* pCandidateKF = vpCandidateKFs[i];
         
         set<KeyFrame*> spCandidateGroup = pCandidateKF->GetConnectedKeyFrames();
         spCandidateGroup.insert(pCandidateKF);
         
         bool bEnoughConsistent = false;
         bool bConsistentForSomeGroup = false;
         for(size_t iG=0, iendG=mvConsistentGroups.size(); iG<iendG; iG++)
         {
             set<KeyFrame*> sPreviousGroup = mvConsistentGroups[iG].first;
             
             bool bConsistent = false;
             for(set<KeyFrame*>::iterator sit=spCandidateGroup.begin(), send=spCandidateGroup.end(); sit!=send;sit++)
             {
                 if(sPreviousGroup.count(*sit))
                 {
                     bConsistent=true;
                     bConsistentForSomeGroup=true;
                     break;
                 }
             }
             
             if(bConsistent)
             {
                 int nPreviousConsistency = mvConsistentGroups[iG].second;
                 int nCurrentConsistency = nPreviousConsistency + 1;
                 if(!vbConsistentGroup[iG])
                 {
                     ConsistentGroup cg = make_pair(spCandidateGroup,nCurrentConsistency);
                     vCurrentConsistentGroups.push_back(cg);
                     vbConsistentGroup[iG]=true; // this avoids including the same group more than once
                 }
                 if(nCurrentConsistency>=mnCovisibilityConsistencyTh && !bEnoughConsistent)
                 {
                     mvpEnoughConsistentCandidates.push_back(pCandidateKF);
                     bEnoughConsistent=true; // this avoids inserting the same candidate more than once
                 }
             }
         }
         
         // If the group is not consistent with any previous group, insert it with the consistency counter set to zero
         if(!bConsistentForSomeGroup)
         {
             ConsistentGroup cg = make_pair(spCandidateGroup,0);
             vCurrentConsistentGroups.push_back(cg);
         }
     }
     
     // Update Covisibility Consistent Groups
     mvConsistentGroups = vCurrentConsistentGroups;
     
     
     // Add Current Keyframe to database
     mpKeyFrameDB->add(mpCurrentKF);
     
     if(mvpEnoughConsistentCandidates.empty())
     {
         mpCurrentKF->SetErase();
         return false;
     }
     else
     {
         return true;
     }
 }
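
The consistency test in DetectLoop boils down to asking whether a candidate's covisibility group shares at least one keyframe with a previously stored group, and bumping that group's counter when it does. A minimal standalone sketch with keyframes reduced to integer ids; ConsistentGroup and the threshold are simplified stand-ins for the ORB-SLAM types (the real threshold is mnCovisibilityConsistencyTh).

#include <cstddef>
#include <cstdio>
#include <set>
#include <utility>
#include <vector>

typedef std::pair<std::set<int>, int> ConsistentGroup; // (covisibility group, consistency count)

static bool sharesKeyFrame(const std::set<int> &a, const std::set<int> &b)
{
    for (std::set<int>::const_iterator it = a.begin(); it != a.end(); ++it)
        if (b.count(*it))
            return true;
    return false;
}

int main()
{
    std::vector<ConsistentGroup> previousGroups;
    previousGroups.push_back(ConsistentGroup(std::set<int>{1, 2, 3}, 1));

    std::set<int> candidateGroup = {3, 7, 9};   // shares keyframe 3 with the stored group

    const int consistencyThreshold = 3;         // stand-in for mnCovisibilityConsistencyTh
    for (std::size_t i = 0; i < previousGroups.size(); ++i) {
        if (sharesKeyFrame(candidateGroup, previousGroups[i].first)) {
            int current = previousGroups[i].second + 1;
            std::printf("consistent, counter now %d (accepted at %d)\n",
                        current, consistencyThreshold);
        }
    }
    return 0;
}
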
	bool AnimationTrack::loadFromXML(TiXmlElement *xmlNodeTrack)
	{
		xmlNodeTrack->QueryStringAttribute("name",&m_name);			
		stringc dataType;
		xmlNodeTrack->QueryStringAttribute("dataType",&dataType);
		m_dataType = stringToDataType(dataType);
		
		TiXmlHandle hXmlNode(xmlNodeTrack);

		//load keyframes data
		TiXmlElement* xmlNodeTimes = hXmlNode.FirstChild( "times" ).Element();
		if(xmlNodeTimes!=0)
		{
			int kfNum = 0;

			//times
			const char* timesStr = xmlNodeTimes->GetText();
			if(timesStr!=0)
			{
				float *times=0;
				kfNum = StringUtil::splitOutFloats(times, timesStr);

				for(int i=0; i<kfNum; ++i)
				{
					KeyFrame* kf = createKeyFrame();
					if(kf!=0)
					{
						kf->setTime(times[i]);
						m_keyFrames.push_back(kf);
					}
				}

				delete[] times; // delete[] assuming splitOutFloats allocates the array with new[]
			}

			// at least one keyframe is required before loading per-keyframe data
			if(kfNum>0)
			{
				// keyframe data
				float *kfDatas=0;
				TiXmlElement* xmlNodeKFs = hXmlNode.FirstChild( "keyframes" ).Element();
				if(xmlNodeKFs!=0)
				{
					int valueNum = StringUtil::splitOutFloats(kfDatas, xmlNodeKFs->GetText());
					u32 dataElemNum = m_keyFrames[0]->getDataElementNum();
					FLT_ASSERT(dataElemNum>0 && valueNum/dataElemNum==kfNum);					
					
					for(int i=0; i<kfNum; ++i)
					{
						m_keyFrames[i]->loadValue(kfDatas, i);						
					}

					delete[] kfDatas; // delete[] assuming splitOutFloats allocates the array with new[]
				}

				// interpolation data
				TiXmlElement* xmlNodeInterps = hXmlNode.FirstChild( "interps" ).Element();
				if(xmlNodeInterps!=0)
				{
					array_t<stringc> strs;
					strs = StringUtil::split(xmlNodeInterps->GetText(), ",", kfNum);
										
					FLT_ASSERT(strs.size()==kfNum);					

					for(int i=0; i<kfNum; ++i)
					{
						m_keyFrames[i]->setInterpTypeByString(strs[i]);						
					}					
				}
			}

			// create a computed frame used for interpolation
			m_computedFrame = createKeyFrame();
		}

		return true;
	}
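
The track loader depends on a helper that splits an XML text node into a float array which the caller then frees. Below is a hedged sketch of a similar splitter that returns a std::vector<float> instead, so no manual delete[] is needed; it assumes whitespace-separated values and is illustrative only, not the engine's StringUtil.

#include <cstddef>
#include <cstdio>
#include <sstream>
#include <string>
#include <vector>

static std::vector<float> splitFloats(const std::string &text)
{
    std::vector<float> values;
    std::istringstream in(text);
    float v;
    while (in >> v)             // read whitespace-separated floats until the text runs out
        values.push_back(v);
    return values;
}

int main()
{
    std::vector<float> times = splitFloats("0.0 0.25 0.5 1.0");
    for (std::size_t i = 0; i < times.size(); ++i)
        std::printf("keyframe %zu at t=%g\n", i, times[i]);
    return 0;
}
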
Exemple #27
0
 void MapPoint::ComputeDistinctiveDescriptors()
 {
     // Retrieve all observed descriptors
     vector<cv::Mat> vDescriptors;
     
     map<KeyFrame*,size_t> observations;
     
     {
         boost::mutex::scoped_lock lock1(mMutexFeatures);
         if(mbBad)
             return;
         observations=mObservations;
     }
     
     if(observations.empty())
         return;
     
     vDescriptors.reserve(observations.size());
     
     for(map<KeyFrame*,size_t>::iterator mit=observations.begin(), mend=observations.end(); mit!=mend; mit++)
     {
         KeyFrame* pKF = mit->first;
         
         if(!pKF->isBad())
             vDescriptors.push_back(pKF->GetDescriptor(mit->second));
     }
     
     if(vDescriptors.empty())
         return;
     
     // Compute distances between them
     const size_t N = vDescriptors.size();
     
     float Distances[N][N]; // note: variable-length array, a compiler extension rather than standard C++
     for(size_t i=0;i<N;i++)
     {
         Distances[i][i]=0;
         for(size_t j=i+1;j<N;j++)
         {
             int distij = ORBmatcher::DescriptorDistance(vDescriptors[i],vDescriptors[j]);
             Distances[i][j]=distij;
             Distances[j][i]=distij;
         }
     }
     
     // Take the descriptor with least median distance to the rest
     int BestMedian = INT_MAX;
     int BestIdx = 0;
     for(size_t i=0;i<N;i++)
     {
         vector<int> vDists(Distances[i],Distances[i]+N);
         sort(vDists.begin(),vDists.end());
         int median = vDists[(N-1)/2]; // lower median of the sorted distances
         
         if(median<BestMedian)
         {
             BestMedian = median;
             BestIdx = i;
         }
     }
     
     {
         boost::mutex::scoped_lock lock(mMutexFeatures);
         mDescriptor = vDescriptors[BestIdx].clone();
     }
 }
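
The selection above keeps the descriptor whose median distance to all the others is smallest. A minimal standalone sketch of the same idea, with plain integers and absolute difference standing in for ORB descriptors and Hamming distance.

#include <algorithm>
#include <climits>
#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <vector>

int main()
{
    std::vector<int> descriptors = {10, 12, 11, 40};   // toy "descriptors"
    const std::size_t N = descriptors.size();

    int bestMedian = INT_MAX;
    std::size_t bestIdx = 0;
    for (std::size_t i = 0; i < N; ++i) {
        // Distances from descriptor i to every descriptor (including itself, as above)
        std::vector<int> dists(N);
        for (std::size_t j = 0; j < N; ++j)
            dists[j] = std::abs(descriptors[i] - descriptors[j]);
        std::sort(dists.begin(), dists.end());
        int median = dists[(N - 1) / 2];               // lower median, as in the code above
        if (median < bestMedian) {
            bestMedian = median;
            bestIdx = i;
        }
    }
    std::printf("most representative descriptor: index %zu (value %d)\n",
                bestIdx, descriptors[bestIdx]);
    return 0;
}
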
void
moveGroupNode(DopeSheetEditor* model,
              const NodePtr& node,
              double dt)
{
    NodeGroupPtr group = node->isEffectNodeGroup();

    assert(group);
    NodesList nodes;
    group->getNodes_recursive(nodes, true);

    for (NodesList::iterator it = nodes.begin(); it != nodes.end(); ++it) {
        NodeGuiPtr nodeGui = boost::dynamic_pointer_cast<NodeGui>( (*it)->getNodeGui() );
        assert(nodeGui);
        std::string pluginID = (*it)->getPluginID();
        NodeGroupPtr isChildGroup = (*it)->isEffectNodeGroup();

        // Move readers
#ifndef NATRON_ENABLE_IO_META_NODES
        if ( ReadNode::isBundledReader( pluginID, node->getApp()->wasProjectCreatedWithLowerCaseIDs() ) ) {
#else
        if (pluginID == PLUGINID_NATRON_READ) {
#endif
            moveReader(*it, dt);
        } else if (pluginID == PLUGINID_OFX_TIMEOFFSET) {
            moveTimeOffset(*it, dt);
        } else if (pluginID == PLUGINID_OFX_FRAMERANGE) {
            moveFrameRange(*it, dt);
        } else if (isChildGroup) {
            moveGroupNode(model, *it, dt);
        }

        // Move keyframes
        const KnobsVec &knobs = (*it)->getKnobs();

        for (KnobsVec::const_iterator knobIt = knobs.begin(); knobIt != knobs.end(); ++knobIt) {
            const KnobIPtr& knob = *knobIt;
            if ( !knob->hasAnimation() ) {
                continue;
            }

            for (int dim = 0; dim < knob->getDimension(); ++dim) {
                if ( !knob->isAnimated( dim, ViewIdx(0) ) ) {
                    continue;
                }
                KeyFrameSet keyframes = knob->getCurve(ViewIdx(0), dim)->getKeyFrames_mt_safe();

                for (KeyFrameSet::iterator kfIt = keyframes.begin(); kfIt != keyframes.end(); ++kfIt) {
                    KeyFrame kf = (*kfIt);
                    KeyFrame fake;

                    knob->moveValueAtTime(eCurveChangeReasonDopeSheet, kf.getTime(), ViewSpec::all(), dim, dt, 0, &fake);
                }
            }
        }
    }
} // moveGroupNode
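
For a single curve, the inner keyframe loop above amounts to shifting every keyframe time by dt. A minimal standalone sketch of that shift, with a std::set<double> standing in for Natron's KeyFrameSet and moveValueAtTime.

#include <cstdio>
#include <set>

static std::set<double> shiftKeyframes(const std::set<double> &times, double dt)
{
    std::set<double> shifted;
    for (std::set<double>::const_iterator it = times.begin(); it != times.end(); ++it)
        shifted.insert(*it + dt);   // rebuild the set so the ordering stays valid
    return shifted;
}

int main()
{
    std::set<double> times = {1, 10, 25};
    std::set<double> moved = shiftKeyframes(times, 4.0);
    for (std::set<double>::const_iterator it = moved.begin(); it != moved.end(); ++it)
        std::printf("%g ", *it);    // prints "5 14 29"
    std::printf("\n");
    return 0;
}
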

NATRON_NAMESPACE_ANONYMOUS_EXIT


////////////////////////// DSMoveKeysAndNodesCommand //////////////////////////

DSMoveKeysAndNodesCommand::DSMoveKeysAndNodesCommand(const DSKeyPtrList &keys,
                                                     const std::vector<DSNodePtr >& nodes,
                                                     double dt,
                                                     DopeSheetEditor *model,
                                                     QUndoCommand *parent)
    : QUndoCommand(parent),
    _keys(keys),
    _nodes(),
    _dt(dt),
    _model(model)
{
    setText( tr("Move selected keys") );
    std::set<NodePtr > nodesSet;
    for (std::vector<DSNodePtr >::const_iterator it = nodes.begin(); it != nodes.end(); ++it) {
        DopeSheetItemType type = (*it)->getItemType();
        if ( (type != eDopeSheetItemTypeReader) &&
             ( type != eDopeSheetItemTypeGroup) &&
             ( type != eDopeSheetItemTypeTimeOffset) &&
             ( type != eDopeSheetItemTypeFrameRange) ) {
            //Note that Retime nodes cannot be moved
            continue;
        }
        _nodes.push_back(*it);
        nodesSet.insert( (*it)->getInternalNode() );
        NodeGroupPtr isGroup = (*it)->getInternalNode()->isEffectNodeGroup();
        if (isGroup) {
            NodesList recurseNodes;
            isGroup->getNodes_recursive(recurseNodes, true);
            for (NodesList::iterator it2 = recurseNodes.begin(); it2 != recurseNodes.end(); ++it2) {
                nodesSet.insert(*it2);
            }
        }
    }

    for (DSKeyPtrList::iterator it = _keys.begin(); it != _keys.end(); ++it) {
        KnobHolderPtr holder = (*it)->getContext()->getInternalKnob()->getHolder();
        assert(holder);
        EffectInstancePtr isEffect = toEffectInstance(holder);
        if (isEffect) {
            nodesSet.insert( isEffect->getNode() );
        }
    }

    for (std::set<NodePtr >::iterator it = nodesSet.begin(); it != nodesSet.end(); ++it) {
        _allDifferentNodes.push_back(*it);
    }
}

void
DSMoveKeysAndNodesCommand::undo()
{
    moveSelection(-_dt);
}
Exemple #29
0
	void Puppet::Load(const std::string &filename, Entity *entity)
	{
		this->filename = filename;
		animations.clear();
		// delete parts?
		parts.clear();
		
		TiXmlDocument xmlDoc(Assets::GetContentPath() + filename);
		
		if (xmlDoc.LoadFile())
		{
			/// TextureAtlas
			TiXmlElement *xmlTextureAtlas = xmlDoc.FirstChildElement("TextureAtlas");
			if (xmlTextureAtlas)
			{
				textureAtlas = new TextureAtlas();
				textureAtlas->Load(xmlTextureAtlas);
			}
			
			/// Parts
			TiXmlElement *xmlParts = xmlDoc.FirstChildElement("Parts");
			if (xmlParts)
			{
				LoadParts(xmlParts, entity);
			}

			/// Animations
			TiXmlElement *xmlAnimations = xmlDoc.FirstChildElement("Animations");
			if (xmlAnimations)
			{
				/// Animation
				TiXmlElement *xmlAnimation = xmlAnimations->FirstChildElement("Animation");
				while (xmlAnimation)
				{
					Animation animation;
                    XMLFileNode xmlFileNodeKeyFrameAnim(xmlAnimation);
					animation.Load(&xmlFileNodeKeyFrameAnim);

					/// PartKeyFrames
					TiXmlElement *xmlPartKeyFrames = xmlAnimation->FirstChildElement("PartKeyFrames");
					while (xmlPartKeyFrames)
					{
						PartKeyFrames partKeyFrames;
						partKeyFrames.SetPuppet(this);
                        XMLFileNode xmlFileNodeKeyFramePart(xmlPartKeyFrames);
						partKeyFrames.Load(&xmlFileNodeKeyFramePart);

						/// KeyFrame
						TiXmlElement *xmlKeyFrame = xmlPartKeyFrames->FirstChildElement("KeyFrame");
						while (xmlKeyFrame)
						{
							KeyFrame keyFrame;
                            XMLFileNode xmlFileNodeKeyFrame(xmlKeyFrame);
							keyFrame.Load(&xmlFileNodeKeyFrame);
							partKeyFrames.AddKeyFrame(keyFrame);

							xmlKeyFrame = xmlKeyFrame->NextSiblingElement("KeyFrame");
						}

						animation.AddPartKeyFrames(partKeyFrames);

						xmlPartKeyFrames = xmlPartKeyFrames->NextSiblingElement("PartKeyFrames");
					}

					animation.RefreshDuration();
					animations.push_back(animation);

					xmlAnimation = xmlAnimation->NextSiblingElement("Animation");
				}
			}
		}
		else
		{
			Debug::Log("Warning: Could not open puppet file: " + Assets::GetContentPath() + filename);
			Debug::Log("         " + std::string(xmlDoc.ErrorDesc()));
			printf("         Row: %d\n", xmlDoc.ErrorRow());
		}
	}
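
Puppet::Load walks sibling XML elements at each level with TinyXML's FirstChildElement/NextSiblingElement pair. Here is a hedged standalone sketch of that iteration pattern, assuming TinyXML is available to link against; the XML string and element names are invented for the example.

#include <tinyxml.h>
#include <cstdio>

int main()
{
    const char *xml =
        "<Animations>"
        "  <Animation name=\"idle\"/>"
        "  <Animation name=\"walk\"/>"
        "</Animations>";

    TiXmlDocument doc;
    doc.Parse(xml);

    TiXmlElement *animations = doc.FirstChildElement("Animations");
    if (animations)
    {
        // Walk every <Animation> sibling, the same way the puppet loader walks
        // Animation, PartKeyFrames and KeyFrame elements above.
        for (TiXmlElement *anim = animations->FirstChildElement("Animation");
             anim != 0;
             anim = anim->NextSiblingElement("Animation"))
        {
            const char *name = anim->Attribute("name");
            std::printf("animation: %s\n", name ? name : "(unnamed)");
        }
    }
    return 0;
}
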
Exemple #30
0
void Plugin::load(FileXML *file)
{
	int result = 0;
	int first_keyframe = 1;
	in = 0;
	out = 0;
// Currently show is ignored when loading
	show = 0;
	on = 0;
	while(keyframes->last) delete keyframes->last;

	do{
		result = file->read_tag();

//printf("Plugin::load 1 %s\n", file->tag.get_title());
		if(!result)
		{
			if(file->tag.title_is("/PLUGIN"))
			{
				result = 1;
			}
			else
			if(file->tag.title_is("SHARED_LOCATION"))
			{
				shared_location.load(file);
			}
			else
			if(file->tag.title_is("IN"))
			{
				in = 1;
			}
			else
			if(file->tag.title_is("OUT"))
			{
				out = 1;
			}
			else
			if(file->tag.title_is("SHOW"))
			{
//				show = 1;
			}
			else
			if(file->tag.title_is("ON"))
			{
				on = 1;
			}
			else
			if(file->tag.title_is("KEYFRAME"))
			{
// Default keyframe
				if(first_keyframe)
				{
					keyframes->default_auto->load(file);
					first_keyframe = 0;
				}
				else
// Override default keyframe
				{
					KeyFrame *keyframe = (KeyFrame*)keyframes->append(new KeyFrame(edl, keyframes));
					keyframe->position = file->tag.get_property("POSITION", (int64_t)0);
					keyframe->load(file);
				}
			}
		}
	}while(!result);
}
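
Plugin::load treats the first KEYFRAME tag as the track's default keyframe and appends every later one at its stored position. A minimal standalone sketch of that pattern, with an invented struct and a pre-parsed position list standing in for FileXML.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

struct ToyKeyFrame { int64_t position; };

int main()
{
    // Pretend these positions were read from successive KEYFRAME tags.
    std::vector<int64_t> parsedPositions = {0, 24, 48};

    ToyKeyFrame defaultKeyframe = {0};
    std::vector<ToyKeyFrame> keyframes;

    bool firstKeyframe = true;
    for (std::size_t i = 0; i < parsedPositions.size(); ++i) {
        if (firstKeyframe) {
            // The first tag only configures the default keyframe.
            defaultKeyframe.position = parsedPositions[i];
            firstKeyframe = false;
        } else {
            // Later tags become explicit keyframes at their stored positions.
            ToyKeyFrame kf = {parsedPositions[i]};
            keyframes.push_back(kf);
        }
    }

    std::printf("default at %lld, %zu explicit keyframes\n",
                (long long)defaultKeyframe.position, keyframes.size());
    return 0;
}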