Example 1
void ContourManager::children(const ContourVector & _contours, const Hierarchy & _hierarchy, int _parentIndex, ContourVector & _children)
{
    _children.clear();

    int currentChild = _hierarchy[_parentIndex][HIERARCHY_INDEX_FIRST_CHILD];//First child

    while (currentChild >= 0)
    {
        _children.push_back(_contours.at(currentChild));

        currentChild = _hierarchy[currentChild][HIERARCHY_INDEX_NEXT];
    }//while (currentChild >= 0)
}//children
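The walk above follows OpenCV's contour hierarchy links (first child, then next sibling). A minimal usage sketch, assuming ContourVector is std::vector<std::vector<cv::Point> >, Hierarchy is std::vector<cv::Vec4i>, and a default-constructible ContourManager; none of these assumptions are confirmed by the listing.

#include <opencv2/imgproc/imgproc.hpp>
#include <vector>

typedef std::vector<std::vector<cv::Point> > ContourVector;
typedef std::vector<cv::Vec4i> Hierarchy;

void childrenUsageSketch(const cv::Mat& gray, ContourManager& manager)
{
    cv::Mat binary;
    cv::threshold(gray, binary, 128, 255, CV_THRESH_BINARY);

    ContourVector contours;
    Hierarchy hierarchy;
    // CV_RETR_TREE keeps the full nesting, so the first-child/next links are populated.
    cv::findContours(binary, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE);

    if (!contours.empty())
    {
        ContourVector holes;
        manager.children(contours, hierarchy, 0, holes); // direct children of contour 0
    }
}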
Example 2
/*!
 * \brief buildMassCenters Use this function to retrieve the mass centers of _contours.
 * \param _contours Input contours.
 * \param _massCenters Output mass centers.
 */
void ContourManager::buildMassCenters(const ContourVector & _contours, PointVector & _massCenters)
{
    ContourVector::size_type contourSize = _contours.size();
    _massCenters.resize(contourSize);

    for (ContourVector::size_type i = 0; i < contourSize; ++i)
    {
        massCenter(_contours[i],_massCenters[i]);
    }//for(ContourVector::size_type i = 0; i < contourSize; ++i)
}//buildMassCenters
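The per-contour massCenter() helper called above is not shown in the listing; below is a minimal sketch of what it could look like using cv::moments. The name, signature, and output type are assumptions, not the author's code.

#include <opencv2/imgproc/imgproc.hpp>
#include <vector>

// Hypothetical stand-in for ContourManager::massCenter, built from image moments.
static void massCenterSketch(const std::vector<cv::Point>& contour, cv::Point2f& center)
{
    cv::Moments m = cv::moments(cv::Mat(contour));
    if (m.m00 != 0.0)
    {
        // Centroid from the first-order moments; guarded against zero-area contours.
        center = cv::Point2f(static_cast<float>(m.m10 / m.m00),
                             static_cast<float>(m.m01 / m.m00));
    }
    else
    {
        center = cv::Point2f(0.0f, 0.0f);
    }
}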
Example 3
/*!
 * \brief buildBoundings Use this function to retrieve the bounding rects of _contours.
 * \param _contours Input contours.
 * \param _boundings Output bounding rects. It is resized to the size of _contours.
 */
void ContourManager::buildBoundings(const ContourVector & _contours, RectVector & _boundings)
{
    ContourVector::size_type contourSize = _contours.size();
    _boundings.resize(contourSize);

#if SD_APPROX_CURVES
    //Approximate contour curves
    std::vector<ContourCurve> contourCurves(contourSize);
#endif//SD_APPROX_CURVES

    //Retrieving bounding rects
    for(ContourVector::size_type i = 0; i < contourSize; ++i)
    {
#if SD_APPROX_CURVES
        cv::approxPolyDP(cv::Mat(_contours[i]), contourCurves[i], 3, true);
        _boundings[i] = cv::boundingRect(cv::Mat(contourCurves[i]));
#else
        _boundings[i] = cv::boundingRect(cv::Mat(_contours[i]));
#endif//SD_APPROX_CURVES
    }//for(ContourVector::size_type i = 0; i < contourSize; ++i)
}//buildBoundings
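A short usage sketch for buildBoundings, assuming RectVector is std::vector<cv::Rect> and that a ContourManager instance is at hand; the drawing helper below is illustrative only and not part of the original source.

#include <opencv2/imgproc/imgproc.hpp>
#include <vector>

typedef std::vector<cv::Rect> RectVector;

void drawBoundingsSketch(ContourManager& manager, const ContourVector& contours, cv::Mat& debugImage)
{
    RectVector boundings;
    manager.buildBoundings(contours, boundings);

    // Overlay each bounding rect on the debug image in green.
    for (RectVector::size_type i = 0; i < boundings.size(); ++i)
    {
        cv::rectangle(debugImage, boundings[i], cv::Scalar(0, 255, 0), 2);
    }
}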
void DepthProcessor::findBlobs()
{
  if (!getDepthData())
    return;
	
	Surface8u to8(mDepthSource->getDepthImage());
	cv::Mat input( toOcv( to8));
	
	cv::Mat gray;
	cv::Mat thresh;
	
  cv::cvtColor( input, gray, CV_RGB2GRAY );
  
  cv::threshold(gray, gray, mDepthLowPass, 0, CV_THRESH_TOZERO_INV);
  
  // Build the initial background model over the first mInitFrames frames (per-pixel max)
  if (mInitInitial < mInitFrames)
  {
    if (mInitInitial == 0)
      mInitial = gray.clone();
    else
      mInitial = cv::max(gray, mInitial);
    mInitInitial++;
    return;
  }
  
  // Subtract the background model, then smooth the result over time before thresholding
  gray -= mInitial;

  cv::accumulateWeighted(gray, mLastGray, 0.9);
  mLastGray.convertTo(gray, CV_8U);
  
  cv::threshold( gray, thresh, mStepFrom, 255, CV_THRESH_BINARY );
  
  // Write into the back buffer; mIndexFG is flipped under the mutex once the frame is done
  auto& blobs = mBlobs[1 - mIndexFG];
  
	blobs.clear();
	float largest = mAreaThreshold;
  
  mContourSurfaces.pushFront(thresh.clone());
  
  ContourVector vec;
  cv::findContours( thresh, vec, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE );
  
  for( ContourVector::iterator iter = vec.begin(); iter != vec.end(); ++iter )
  {
    float a = cv::contourArea(*iter);
    if (a > largest)
    {
      Blob b;
      b.mContourArea = a;
      b.mContourPoints.resize(iter->size());
      copy(iter->begin(), iter->end(), b.mContourPoints.begin());
      blobs.push_back(b);
      push_heap(blobs.begin(), blobs.end(), SortDescendingArea());
      if (blobs.size() > DepthProcessor::smMAX_BLOBS)
      {
        // Pop the smallest-area blob off the heap and tighten the acceptance threshold
        pop_heap(blobs.begin(), blobs.end(), SortDescendingArea());
        blobs.pop_back();
        largest = blobs.front().mContourArea;
      }
    }
  }
  
	for (BlobVector::iterator i = blobs.begin(); i != blobs.end(); i++)
	{
		i->mCentroid.x = i->mCentroid.y = 0.0f;
    float mag = 10000.0f;
		i->mLeftMost.x = mag;
		i->mRightMost.x = -mag;
		i->mTopMost.y = mag;
		i->mBottomMost.y = -mag;
    i->mBounds.x1 = i->mBounds.y1 = mag;
    i->mBounds.x2 = i->mBounds.y2 = -mag;
		for (vector<cv::Point>::iterator pt = i->mContourPoints.begin(); pt != i->mContourPoints.end(); ++pt)
		{
			i->mCentroid.x += pt->x;
			i->mCentroid.y += pt->y;
      i->mBounds.include(Vec2f(pt->x, pt->y));
			if (i->mLeftMost.x > pt->x)
			{
				i->mLeftMost.x = pt->x;
				i->mLeftMost.y = pt->y;
			}
			if (i->mRightMost.x < pt->x)
			{
				i->mRightMost.x = pt->x;
				i->mRightMost.y = pt->y;
			}
			if (i->mTopMost.y > pt->y)
			{
				i->mTopMost.x = pt->x;
				i->mTopMost.y = pt->y;
			}
			if (i->mBottomMost.y < pt->y)
			{
				i->mBottomMost.x = pt->x;
				i->mBottomMost.y = pt->y;
			}
		}
		float sz = i->mContourPoints.size();
		if (sz > 0.0f)
		{
			i->mCentroid.x /= sz;
			i->mCentroid.y /= sz;
		}     
		i->mZDist = *to8.getDataRed(i->mCentroid);
		// Loop through and do z calc
		float steps = 10.0f;
		Vec3f step = (i->mBottomMost - i->mTopMost) / steps;
		Vec3f sample = i->mTopMost;
		for (int x = 0; x < steps; x++)
		{
      if (thresh.at<uint8_t>(cv::Point(sample.x, sample.y)) > 0)
      {
        float val = *to8.getDataRed(Vec2f(sample.x, sample.y));
        if (val < mDepthLowPass) 
          i->mZDist = max(i->mZDist, val);
      }
      sample += step; 
		}
		step = (i->mRightMost - i->mLeftMost) / steps;
		sample = i->mLeftMost;
		for (int x = 0; x < steps; x++)
		{
      if (thresh.at<uint8_t>(cv::Point(sample.x, sample.y)) > 0)
      {
        float val = *to8.getDataRed(Vec2f(sample.x, sample.y));
        if (val < mDepthLowPass)
          i->mZDist = max(i->mZDist, val);
      }
      sample += step;
		}
	}
	sort(blobs.begin(), blobs.end(), SortDescendingZ());
  {
    BlobLock b(mBlobMutex);
    mIndexFG = 1 - mIndexFG;
  }
}
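findBlobs() fills the back buffer mBlobs[1 - mIndexFG] and flips mIndexFG under mBlobMutex only after the frame is fully processed. A minimal sketch, not present in the source, of how a consumer thread could read the finished buffer; the getBlobs() accessor is hypothetical, and BlobLock is assumed to be a scoped lock over mBlobMutex.

// Hypothetical accessor: copy out the most recently completed blob buffer.
BlobVector DepthProcessor::getBlobs()
{
    BlobLock lock(mBlobMutex);   // same mutex that guards the mIndexFG flip above
    return mBlobs[mIndexFG];     // front buffer: the last fully processed frame
}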
void MotionTrackingTestApp::draw()
{
   // gl::setViewport( getWindowBounds() );
    // clear out the window with white
	gl::clear( Color( 1, 1, 1 ) );
    
//    if( mSurface ){
//        if( mTexture ){
//            mTexture->update( mSurface );
//        } else {
//            mTexture = gl::Texture::create( mSurface );
//        }
//        gl::draw( mTexture, mTexture->getBounds(), getWindowBounds() );
//    }
    
    if( mSurfaceDepth ){
        if( mTextureDepth ){
            mTextureDepth->update( Channel32f( mSurfaceDepth ) );
        } else {
            mTextureDepth = gl::Texture::create( Channel32f( mSurfaceDepth ) );
        }
        gl::color( Color::white() );
        gl::draw( mTextureDepth, mTextureDepth->getBounds() );
    }
    gl::pushMatrices();
    gl::translate( Vec2f( 320, 0 ) );
    if( mSurfaceBlur ){
        if( mTextureDepth ){
            mTextureDepth->update( Channel32f( mSurfaceBlur ) );
        } else {
            mTextureDepth = gl::Texture::create( Channel32f( mSurfaceBlur ) );
        }
        gl::draw( mTextureDepth, mTextureDepth->getBounds() );
    }
    gl::translate( Vec2f( 0, 240 ) );
    if( mSurfaceSubtract ){
        if( mTextureDepth ){
            mTextureDepth->update( Channel32f( mSurfaceSubtract ) );
        } else {
            mTextureDepth = gl::Texture::create( Channel32f( mSurfaceSubtract ) );
        }
        gl::draw( mTextureDepth, mTextureDepth->getBounds() );
    }
    gl::translate( Vec2f( -320, 0 ) );
    for( ContourVector::iterator iter = mContours.begin(); iter != mContours.end(); ++iter ){
        glBegin( GL_LINE_LOOP );
        for( vector< cv::Point >::iterator pt = iter->begin(); pt != iter->end(); ++pt ){
            gl::color( Color( 1.0f, 0.0f, 0.0f ) );
            gl::vertex( fromOcv( *pt ) );
        }
        glEnd();
    }
    gl::translate( Vec2f( 0, 240 ) );
    for( int i=0; i<mTrackedShapes.size(); i++){
        glBegin( GL_POINTS );
        for( int j=0; j<mTrackedShapes[i].hull.size(); j++ ){
           gl::color( Color( 1.0f, 0.0f, 0.0f ) );
           gl::vertex( fromOcv( mTrackedShapes[i].hull[j] ) );
        }
        glEnd();
    }
    gl::popMatrices();
    mParams->draw();
}
void MotionTrackingTestApp::onDepth( openni::VideoFrameRef frame, const OpenNI::DeviceOptions& deviceOptions){
    mInput = toOcv( OpenNI::toChannel16u( frame ) );
    
    cv::Mat withoutBlack;
    withoutBlack = removeBlack( mInput, mNearLimit, mFarLimit );
    
    cv::Mat eightBit;
    cv::Mat thresh;
    
    // scale the 16-bit depth values down to 8 bits (convertTo only changes the depth, not the channel count)
    withoutBlack.convertTo( eightBit, CV_8UC3, 0.1/1.0  );
    cv::bitwise_not(eightBit, eightBit);
    
    mContours.clear();
    mApproxContours.clear();
    cv::threshold( eightBit, thresh, mThresh, mMaxVal, CV_THRESH_BINARY );
    cv::findContours( thresh, mContours, mHierarchy, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE );
    
    vector<cv::Point> approx;
    // reduce the number of points per contour with polygon approximation
    for( int i=0; i<mContours.size(); i++ ) {
        cv::approxPolyDP(mContours[i], approx, 3, true );
        mApproxContours.push_back( approx );
    }
    
    // get data that we can later compare
    mShapes.clear();
    mShapes = getEvaluationSet( mApproxContours, 75, 100000 );
    
    // find the nearest match for each shape
    for( int i = 0; i<mTrackedShapes.size(); i++ ){
        Shape* nearestShape = findNearestMatch( mTrackedShapes[i], mShapes, 5000 );
        
        if( nearestShape != NULL){
            // update our tracked contour
            // last frame seen
            nearestShape->matchFound = true;
            mTrackedShapes[i].centroid = nearestShape->centroid;
            mTrackedShapes[i].lastFrameSeen = ci::app::getElapsedFrames();
            mTrackedShapes[i].hull.clear();
            mTrackedShapes[i].hull = nearestShape->hull;
        }
    }
    
    // if shape->matchFound is false, add it as a new shape
    for( int i = 0; i<mShapes.size(); i++ ){
        if( mShapes[i].matchFound == false ){
            mShapes[i].ID = shapeUID;
            mShapes[i].lastFrameSeen = ci::app::getElapsedFrames();
            mTrackedShapes.push_back( mShapes[i]);
            shapeUID++;
//            std::cout << "adding a new tracked shape with ID: " << mShapes[i].ID << std::endl;
        }
    }
    
    // if we didn't find a match for more than 20 frames, delete the tracked shape
    for( vector<Shape>::iterator it=mTrackedShapes.begin(); it!=mTrackedShapes.end(); ){
//        std::cout << "tracked shapes size: " << mTrackedShapes.size() << std::endl;
        if( ci::app::getElapsedFrames() - it->lastFrameSeen > 20 ){
//            std::cout << "deleting shape with ID: " << it->ID << std::endl;
            it = mTrackedShapes.erase(it);
        } else {
            ++it;
        }
    }
    
    cv::Mat gray8Bit;
    withoutBlack.convertTo( gray8Bit, CV_8UC3, 0.1/1.0  );
    
    mSurfaceDepth = Surface8u( fromOcv( mInput  ) );
    mSurfaceBlur = Surface8u( fromOcv( withoutBlack ) );
    mSurfaceSubtract = Surface8u( fromOcv( eightBit ) );
}
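removeBlack() is referenced above but not shown; below is a minimal sketch of one way it could clip the 16-bit depth map to the [mNearLimit, mFarLimit] band. The name, signature, and behaviour are assumptions inferred from how it is used in onDepth().

#include <opencv2/core/core.hpp>

// Hypothetical stand-in for removeBlack: keep depth samples inside the working range.
static cv::Mat removeBlackSketch(const cv::Mat& depth16u, double nearLimit, double farLimit)
{
    // Mask of pixels inside the [near, far] band.
    cv::Mat mask;
    cv::inRange(depth16u, cv::Scalar(nearLimit), cv::Scalar(farLimit), mask);

    // Copy in-range samples, zero everything else (including the "black" no-data pixels).
    cv::Mat clipped = cv::Mat::zeros(depth16u.size(), depth16u.type());
    depth16u.copyTo(clipped, mask);
    return clipped;
}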