Code Example #1
double WeightedDIDCompass::currentView( Img* CV ) {
    if ( learnMode == LEARNING ) {
        if ( learnIndex >= learnStart && learnIndex <= learnStop )
            learn(SS, CV);
        learnIndex++;
        return computeAngle(SS, CV, false);
    } else if ( learnMode == NO_LEARNING )
        return computeAngle(SS, CV, false);
    else  // learnMode == USING (final case, so every path returns a value)
        return computeAngle(SS, CV, true);
}
Code Example #2
File: vpMeEllipse.cpp Project: tswang/visp
/*!
 * \brief computeAngle
 * Convenience overload: computes the two angles for the given points and discards the results.
 */
void
vpMeEllipse::computeAngle(int ip1, int jp1, int ip2, int jp2)
{

  double a1, a2 ;
  computeAngle(ip1,jp1,a1, ip2, jp2,a2) ;
}
Code Example #3
// Adds the item to our hash map. Returns true if the hit point was inside the relevant volume, false otherwise.
bool CircularSimulationResult::addItem(hpvec3 point) {
	// Check whether the point is in our Z range, if not abort
	if (!isInZRange(point)) {
		return false;
	}

	// Compute the angle of the point relative to the reference direction
	hpreal angle = computeAngle(point);
	hpuint angleSlot = computeAngleSlot(angle);

	hpuint posZSlot = convertPosZToPosZSlot(point.z);
	hpuint slot = computeSlot(angleSlot, posZSlot);

	assert(angleSlot < m_angleSteps);
	assert(posZSlot < m_posZSteps);

	hpreal radius = computeRadiusXY(point);
	hpreal oldRadius = getItem(angleSlot, posZSlot);

	if (oldRadius > radius) {
		m_entries->erase(slot);
		m_entries->insert(std::pair<hpuint,hpreal>(slot, radius));
	}
	return true;
}
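addItem/getItem rely on two small helpers that are not shown above. The following is only a minimal sketch of what they might look like, assuming hpreal/hpuint are float/unsigned typedefs and that the reference direction is the +X axis (both assumptions, not taken from the project):

#include <cmath>

// Hypothetical sketches; the real project may define these differently.
hpreal CircularSimulationResult::computeAngle(hpvec3 point) {
	// Angle of the XY component of the point, measured against the assumed +X
	// reference direction and mapped to [0, 2*pi)
	hpreal angle = std::atan2(point.y, point.x);
	if (angle < 0) angle += hpreal(2.0 * 3.14159265358979323846);
	return angle;
}

hpuint CircularSimulationResult::computeAngleSlot(hpreal angle) {
	// Quantize the angle into m_angleSteps equally sized slots
	return hpuint(angle / hpreal(2.0 * 3.14159265358979323846) * m_angleSteps);
}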
Code Example #4
void ArrowLine::drawShape(QPainter &p)
{
    p.setPen(darkGray);
    QCanvasLine::drawShape(p);

    double angle = computeAngle(startPoint().x(),
                                startPoint().y(),
                                endPoint().x(),
                                endPoint().y());
    QPointArray pts(3);

    QWMatrix m;
    int x, y;
    m.rotate(angle);
    m.map(-5, -2, &x, &y);
    pts.setPoint(0, x, y);
    m.map(-5, 2, &x, &y);
    pts.setPoint(1, x, y);
    m.map(0, 0, &x, &y);
    pts.setPoint(2, x, y);

    pts.translate(endPoint().x(), endPoint().y());

    p.setBrush(QColor(darkGray));
    p.drawPolygon(pts);
}
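QWMatrix::rotate() takes an angle in degrees, so the computeAngle helper used above is presumably just the direction of the segment from the start point to the end point, converted to degrees. A hedged sketch with an assumed signature (not shown in the project):

#include <cmath>

// Hypothetical: angle of the segment (x1,y1)->(x2,y2) in degrees, in Qt's
// screen coordinates (y axis pointing down), suitable for QWMatrix::rotate().
static double computeAngle(int x1, int y1, int x2, int y2)
{
    return std::atan2(double(y2 - y1), double(x2 - x1)) * 180.0 / 3.14159265358979323846;
}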
Code Example #5
void RollingballApplet::setFrame( cv::Mat& frame )
{
    // Filter pipeline
    mCalibrationFilter->setFrame( frame );
    mCalibrationFilter->process();
    mFrame = mCalibrationFilter->getCalibratedFrame();

    // compute plate angle
    mAlpha = computeAngle();

    //
    // compute new ball position
    //
    if(!mDead)
    {
        mSpeed *= 0.98;                                // friction
        mSpeed += -6.0 * sin(M_PI/180.0 * mAlpha);    // acceleration
        mSpeed = qBound(-20.0, mSpeed, 20.0);           // limit speed

        mStep += mSpeed;
        if(mStep > W)
        {
            mStep = W;
            killPlayer(true);
        }
        if(mStep < -W)
        {
            mStep = -W;
            killPlayer(true);
        }
    }

    update();
}
Code Example #6
File: vplStroke.cpp Project: msahlen/vpl
    void Stroker::miterJoiner(Stroker* stroker,
                              const Vector& point,
                              const Vector& beforeNormal,
                              const Vector& afterNormal)
    {
        Vector realPoint = point;
        Vector after = afterNormal;
        Vector before = beforeNormal;

        stroker->inverseScale_.transform(after);
        stroker->inverseScale_.transform(before);
        stroker->inverseScale_.transform(realPoint);

        DynamicArray<float>* outer = &stroker->outerPoints_;
        DynamicArray<float>* inner = &stroker->innerPoints_;

        Winding winding = stroker->determineWinding(before,after);

        if(winding == cCounterclockwise)
        {
            swap(outer,inner);
            before.invert();
            after.invert();
        }

        // Undo the perpendicular step: recover the direction vectors from the normals
        Vector prevDir(-before.y_,before.x_);
        Vector dir(-after.y_,after.x_);
        Vector intersection;

        dir.invert();

        bool intersectionExists = computeIntersection(realPoint + before,
                                                      realPoint + after,
                                                      prevDir,dir,
                                                      intersection);

        if(intersectionExists)
        {
            float angle = computeAngle(dir,prevDir);
            float limit = 1/sin(angle/2);
           
            if(limit > stroker->miterLimit_)
            {
                stroker->bevelJoiner(stroker,point,beforeNormal,afterNormal);
                return;
            }

            stroker->scale_.transform(intersection);
            stroker->scale_.transform(after);

            stroker->addPoint(outer,intersection);
            stroker->addPoint(outer,point + after);
            
            stroker->innerJoiner(inner,point,after);
        }
        else
            stroker->bevelJoiner(stroker,point,beforeNormal,afterNormal);

    }
Code Example #7
File: Lab7PW.c Project: cococo1/university
void rotatePoint(POINT *p,float angle,POINT center)
{

	float currentAngle, radius;
/*	char str[256];


	sprintf(str,"%i \n",computeDistance(*p,center));
	OutputDebugStringA(str);*/
	currentAngle=computeAngle(*p,center);
	radius=(float)computeDistance(*p,center);	/* the rotation must preserve the distance from the center */

	currentAngle+=angle;
	p->x=center.x+radius*cos(currentAngle);
	p->y=center.y+radius*sin(currentAngle);
}
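Since the rotated coordinates above are fed straight into cos()/sin(), computeAngle here presumably returns the polar angle of p around center in radians. A hedged sketch under that assumption (the original helper in Lab7PW.c may differ):

#include <math.h>

/* Hypothetical: polar angle of p around center, in radians. */
float computeAngle(POINT p, POINT center)
{
	return (float)atan2((double)(p.y - center.y), (double)(p.x - center.x));
}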
Code Example #8
File: optical_flow.cpp Project: lasoren/nba-vision
void OpticalFlow::computeOpticalFlow(Mat& cf){
	Mat current_frame;
	cvtColor(cf, current_frame, COLOR_BGR2GRAY);
	if( points[0].empty() ){
		buildPointGrid(current_frame);
	}
	if (buckets.empty()){
		buildBuckets(6, 30.0, 10);
		cout <<"first bucket angle :" << buckets[0].angle_max << endl;
	}
	if( !previous_frame.empty() ){
		vector<double> distance(points[0].size()), angle(points[0].size());
		vector<uchar> status;
		vector<float> err;

		calcOpticalFlowPyrLK(previous_frame, current_frame, points[0],
					points[1], status, err, winSize, 3, termcrit, 0, 0.01);	
		for( int i = 0; i < points[1].size(); i++ ){
			if( !status[i] )
				continue;
			distance[i] = computeDistance(points[0][i], points[1][i]);
			angle[i] = computeAngle(points[0][i], points[1][i]);
			status[i] = assignBucket(distance[i], angle[i]);
		}
		Bucket max_bucket = maxBucket();
		cout << max_bucket.getCount() << endl;
		for( int i = 0; i < points[1].size(); i++ ){
			if( !status[i] )
				continue;
			if(!max_bucket.inBucket(distance[i], angle[i])){
				drawFlow(points[0][i], points[1][i], false, cf);
			}
		}
		//debug	
		if ( debug_ ){
			imshow(windowName, previous_frame);
		}
	}
	// update for next frame
	previous_frame = current_frame; // current frame becomes previous frame.
}
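computeDistance and computeAngle here describe each flow vector (the displacement from the previous point to the tracked point) by length and direction for the bucketing step. A sketch under that reading; the angle unit expected by assignBucket is not shown, so degrees in [0, 360) are an assumption:

#include <cmath>
#include <opencv2/core.hpp>

// Hypothetical helpers for the flow vector p1 -> p2; the project's versions may differ.
double computeDistance(const cv::Point2f& p1, const cv::Point2f& p2) {
	double dx = p2.x - p1.x, dy = p2.y - p1.y;
	return std::sqrt(dx*dx + dy*dy);          // Euclidean length of the flow vector
}

double computeAngle(const cv::Point2f& p1, const cv::Point2f& p2) {
	double a = std::atan2(p2.y - p1.y, p2.x - p1.x) * 180.0 / CV_PI;
	return a < 0 ? a + 360.0 : a;             // orientation mapped to [0, 360) degrees
}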
Code Example #9
File: vplStroke.cpp Project: msahlen/vpl
    void Stroker::generateArc(const Vector& center,const Vector& start,
                              const Vector& end,Winding winding,
                              DynamicArray<float>* points)
    {

        Vector realStart = start;
        Vector realEnd = end;

        inversePathTransform_.transform(realStart);
        inversePathTransform_.transform(realEnd);

        float startAngle = computeOrientation(realStart);
        float diff = computeAngle(realStart,realEnd);
        float radius = size_ / 2;
        double ra = (std::fabs(radius) + std::fabs(radius)) / 2;
        double da = acos(ra/(ra + 0.125f)) * 2;

        vplUint numSteps = static_cast<vplUint>(diff/da);

        float angle = 0.0f;

        Vector point;

        for(vplUint i = 0;i < numSteps;i++)
        {
            if(winding == cClockwise)
                angle = startAngle + i / float(numSteps) * diff;
            else
                angle = startAngle - i / float(numSteps) * diff;

            Vector offset(cos(angle),sin(angle));

            offset *= radius;

            scale_.transform(offset);

            point = center + offset;

            addPoint(points,point);
        }
    }
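generateArc needs the start orientation and the sweep between the two arc endpoints. A hedged sketch of computeOrientation/computeAngle, assuming Vector exposes x_/y_ members as in the miterJoiner snippet above; vpl's actual implementation may differ:

#include <cmath>

// Hypothetical helpers matching the calls above.
static float computeOrientation(const Vector& v)
{
    return std::atan2(v.y_, v.x_);                 // polar angle of v, in radians
}

static float computeAngle(const Vector& a, const Vector& b)
{
    float dot = a.x_*b.x_ + a.y_*b.y_;
    float la  = std::sqrt(a.x_*a.x_ + a.y_*a.y_);
    float lb  = std::sqrt(b.x_*b.x_ + b.y_*b.y_);
    float c   = dot / (la*lb);
    if (c >  1.0f) c =  1.0f;                      // clamp for numerical safety
    if (c < -1.0f) c = -1.0f;
    return std::acos(c);                           // unsigned angle between a and b, in radians
}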
Code Example #10
double ZSwcDeepAngleMetric::computeDeepAngle(
    const std::vector<const Swc_Tree_Node *> &nodeArray1,
    const std::vector<const Swc_Tree_Node *> &nodeArray2)
{
  if (nodeArray1.empty() || nodeArray2.empty()) {
    return Infinity;
  }

  if (nodeArray1.size() <= 1 || nodeArray2.size() <= 1) {
    if (SwcTreeNode::distance(
          nodeArray1[0], nodeArray2[0], SwcTreeNode::EUCLIDEAN_SURFACE) <=
        m_minDist) {
      return 0.0;
    } else {
      return Infinity;
    }
  }

  size_t s1 = nodeArray1.size() - 1;
  size_t s2 = nodeArray2.size() - 1;

  double minDist = Infinity;

  for (size_t i = 0; i < s1; ++i) {
    for (size_t j = 0; j < s2; ++j) {
      if (SwcTreeNode::distance(
            nodeArray1[i], nodeArray2[j], SwcTreeNode::EUCLIDEAN_SURFACE) <=
          m_minDist) {
        double dist = computeAngle(nodeArray1[i + 1], nodeArray1[i],
            nodeArray2[j], nodeArray2[j + 1]);
        if (dist < minDist) {
          minDist = dist;
        }
      }
    }
  }

  return minDist;
}
Code Example #11
hpreal CircularSimulationResult::getItem(hpvec3 point) {
	// Check whether the point is in our Z range, if not abort and return positive infinity
	if (!isInZRange(point)) {
		return INFINITY;
	}

	// Compute the angle of the point relative to the reference direction
	hpreal angle = computeAngle(point);
	hpuint angleSlot = computeAngleSlot(angle);

	hpuint posZSlot = convertPosZToPosZSlot(point.z);
	hpuint slot = computeSlot(angleSlot, posZSlot);

	assert(angleSlot < m_angleSteps);
	assert(posZSlot < m_posZSteps);

	std::unordered_map<hpuint,hpreal>::const_iterator result = m_entries->find(slot);
	if (result == m_entries->end()) {
		return INFINITY;
	} else {
		return result->second;
	}
}
Code Example #12
void tryToUnderstand1(RobotPosition * pose,  std::list<Landmark *>* landmarks,std::vector<Landmark *> *  last_observations, std::vector<Landmark *> *  propagate_observations){
  double rpose[3];
  pose->getEstimateData(rpose);
  
  propagate_observations->clear();
  
  std::cout << "Robot Position:\t" << rpose[0] << "\t" << rpose[1] << "\t" << rpose[2] << "\n";
  
  double expected[last_observations->size()];	//	this will contain the projections of the previously seen landmarks in the current pose
  // I.E. this tells where we expect to see the landmarks propagated from the last step

  bool use_general_angle_tolerance[last_observations->size()];	// this will tell if the angle tolerance to be used is the general one or the bearing-only based one

  int associations[pose->observations.size()];	// this will contain the candidate associations, that will become effective ONLY IF there will be no ambiguity
  for(unsigned int i=0; i<pose->observations.size(); i++){
    associations[i] = -1;
  }

  std::cout << "We expect to see stuff at:\n";
  for(unsigned int i = 0; i<last_observations->size(); i++){	// populate the expected array
    Landmark * lm = (*last_observations)[i];

    if(lm->getObservations()->size() > 1){	// first case: landmark already has an estimated position
      expected[i] = computeAngle(lm, pose);
      use_general_angle_tolerance[i] = true;
    }
    else{	// second case: landmark has been seen only once up to now, hence there is no position estimation
      double prev_seen = (*(lm->getObservations()))[0]->bearing;
      double prev_theta = (*(lm->getObservations()))[0]->pose->theta();
      double rotated = computeAnglesDifference(pose->theta(), prev_theta);
      expected[i] = prev_seen-rotated;
      use_general_angle_tolerance[i] = false;
    }

    std::cout << expected[i] << "\n";
  }

  std::cout << "observations:\n";
  for(unsigned int obs_counter=0; obs_counter < pose->observations.size(); obs_counter++){	// for every observation in the current position
    std::cout << (pose->observations)[obs_counter].bearing << ": ";
    // Try to assign it to a previously generated landmark
    if(last_observations->size() > 0){

      unsigned int closer_index;
      unsigned int second_closer_index;
      // compute closer_index and second_closer_index.

      double distances[last_observations->size()];
      for(unsigned int i=0; i < last_observations->size(); i++){
	distances[i] = d_abs(computeAnglesDifference(expected[i], pose->observations[obs_counter].bearing));
      }	//	now 'distances' contains the distance between the currently considered observation and the expected values

      closer_index = 0;
      if(last_observations->size() == 1){
	second_closer_index = 0;
      }
      else{
	second_closer_index = 1;
	if(distances[1] < distances[0]){
	  closer_index = 1;
	  second_closer_index = 0;
	}
      }
      for (unsigned int i = 1; i < last_observations->size(); i++){
	if (distances[i] <  distances[closer_index]){
	  second_closer_index = closer_index;
	  closer_index = i;
	}
	else{
	  if(distances[i] <  distances[second_closer_index]){
	    second_closer_index = i;
	  }
	}
      }
      // if closer is "close enough" and second_closer is "far enough" (I.E. there is no ambiguity)
      // associate the current observation to 'closer'.
      double tolerance = GENERAL_ANGLE_TOLERANCE;
      if(!use_general_angle_tolerance[closer_index]){
	tolerance = BEAR_ONLY_ANGLE_TOLERANCE;
      }
      std::cout << "tolerance is " << tolerance << "\n";
      if(distances[closer_index] < tolerance){
	if((second_closer_index == closer_index) || (distances[second_closer_index] > SECOND_ANGLE_MIN_DISTANCE)){
	  associations[obs_counter] = closer_index;
	  std::cout << "close to " << expected[closer_index] << "\n";
	}
	else{
	  std::cout << "close to " << expected[closer_index] << " but also to " << expected[second_closer_index] << "\n";
	}
      }
      else{
	std::cout << "is far from everything\n";
      }

      // NOTE:	Do not add the observation to the set inside the landmark corresponding to
      //			'closer' yet, because, if two or more NEW observations are associated to the same
      //			landmark, none of them will be added.
    }
  }

  // add the computed observations to the landmarks, but only if there is no ambiguity

  bool associated[last_observations->size()];	//	associated[i] will be true if there is at least 1 new observation associated to the i-th landmark in last_observations
  bool ambiguous[last_observations->size()];	//	ambiguous[i] will be true if there are at least 2 new observations associated to the i-th landmark in last_observations
  // initialize the arrays just created
  for(unsigned int i=0; i<last_observations->size(); i++){
    associated[i] = false;
    ambiguous[i] = false;
  }

  // tell which ones are ambiguous
  for(unsigned int i=0; i < pose->observations.size(); i++){
    int value = associations[i];
    if(value!=-1){
      if (!associated[value]){
	associated[value] = true;
      }
      else{	//	associated is already true, hence one or more new observations are already claiming this landmark
	ambiguous[value] = true;
      }
    }
  }

  // add non-ambiguous observations to the corresponding landmarks
  // and create new landmarks for the new observed unassociated observations
  std::cout << "adding observations to landmarks\n";
  for(unsigned int i=0; i < pose->observations.size(); i++){
    std::cout << "obs " << i << " ";
    if(associations[i] != -1){
      if(!ambiguous[associations[i]]){
	std::cout << "tailed to " << associations[i] << "\n";
	Landmark * lm = (*last_observations)[associations[i]];
	lm->addObservation(&(pose->observations[i]));
	lm->estimatePosition();
	lm->checkConfirmed(_confirm_obs);
	double newpos[2];
	(*last_observations)[associations[i]]->getPosition(newpos);
	std::cout << "\tnew position estimated:\t" << newpos[0] << "\t" << newpos[1] << "\n";
	propagate_observations->push_back((*last_observations)[associations[i]]);
      }
      else{
	std::cout << "should be tailed to " << associations[i] << ", but there is ambiguity\n";
      }
    }
    else{	// unassociated
      std::cout << "is a new landmark\n";
      Landmark * lm = new Landmark();
      lm->addObservation (&(pose->observations[i]));
      //			lm->estimatePosition();
      landmarks->push_back(lm);
      propagate_observations->push_back(lm);
    }
  }

  std::cout << "now checking landmarks from previous step\n";
  for(unsigned int i = 0; i<last_observations->size(); i++){
    std::cout << "prev_obs " << i << " ";
    Landmark * lm = (*last_observations)[i];
    if(!associated[i]){
      // for the landmarks expected to be seen that are not in the current set of observations, we have two cases:
      if(lm->isConfirmed()){
	// case one: the landmark has enough confirmed observations, so it is considered reliable
	std::cout << "was old enough, CONFIRMED\n";
      }
      else{
	// case two: the landmark is still unreliable, so it is discarded
	std::cout << "was too young, REMOVED\n";
	landmarks->remove(lm);
	delete lm;
      }
    }
    else{
      if(ambiguous[i]){
	std::cout << "was ambiguous,";
	if(lm->isConfirmed()){
	  std::cout << " but it was considered reliable, PROPAGATED" << std::endl;
	  propagate_observations->push_back(lm);
	}
	else{
	  std::cout << " and it was pretty young, REMOVED" << std::endl;
	  landmarks->remove(lm);
	  delete lm;
	}
      }
      else{
	std::cout << "has been REINFORCED\n";
      }
    }
  }
  std::cout << "\n\n";
}
Code Example #13
QPainterPath ConnectionItem::drawPath(const QPointF& start, const QPointF& end)
{
    const qreal RADIUS = 1.5 * ConnectorItem::SIZE;
    const qreal ARC_RECT_SIZE = 2*RADIUS;
    QRectF arcRect(0, 0, ARC_RECT_SIZE, ARC_RECT_SIZE);
    qreal xDiff = end.x() - start.x();
    qreal yDiff = end.y() - start.y();
    qreal yOffset = 0;
    
    if(yDiff <= 0)
        yOffset = yDiff;
    else
        yOffset = 0;
    
    QPainterPath path;
    
    path.moveTo(start);
    if(xDiff > 0)
    {
        // the connection points forward
        if(xDiff > 2*RADIUS)
        {
            // start and end are so far from each other that they can
            // be directly connected (without a loop)
            if(yDiff > 0)
            {
                // the connection points downwards
                if(fabs(yDiff) < 2*RADIUS)
                {
                    // start and end point are so close to each other that
                    // the arcs must be less than 90 degrees
                    qreal angle = computeAngle(RADIUS, yDiff/2);
                    qreal width = computeWidth(yDiff/2, angle);
                    
                    arcRect.moveTo(start.x() + xDiff/2 - width - RADIUS, start.y());
                    path.arcTo(arcRect, 90, -angle);
                    arcRect.moveTo(start.x() + xDiff/2 + width - RADIUS, end.y() - ARC_RECT_SIZE);
                    path.arcTo(arcRect, -90 - angle, angle);
                }
                else
                {
                    // two 90 degree arcs are possible
                    arcRect.moveTo(start.x() + xDiff/2 - ARC_RECT_SIZE, start.y());
                    path.arcTo(arcRect, 90, -90);
                    arcRect.moveTo(start.x() + xDiff/2, end.y() - ARC_RECT_SIZE);
                    path.arcTo(arcRect, 180, 90);
                }
            }
            else
            {
                // the connection points upwards
                if(fabs(yDiff) < 2*RADIUS)
                {
                    // start and end point are so close to each other that
                    // the arcs must be less than 90 degrees
                    qreal angle = computeAngle(RADIUS, yDiff/2);
                    qreal width = computeWidth(yDiff/2, angle);
                    
                    arcRect.moveTo(start.x() + xDiff/2 - width - RADIUS, start.y() - ARC_RECT_SIZE);
                    path.arcTo(arcRect, 270, angle);
                    arcRect.moveTo(start.x() + xDiff/2 + width - RADIUS, end.y());
                    path.arcTo(arcRect, 90 + angle, -angle);
                }
                else
                {
                    // two 90 degree arcs are possible
                    arcRect.moveTo(start.x() + xDiff/2 - ARC_RECT_SIZE, start.y() - ARC_RECT_SIZE);
                    path.arcTo(arcRect, 270, 90);
                    arcRect.moveTo(start.x() + xDiff/2, end.y());
                    path.arcTo(arcRect, 180, -90);
                }
            }
        }
        else
        {
            // start and end are so close to each other
            // that the connection must have a loop
            arcRect.moveTo(start.x() + xDiff/2 - RADIUS, start.y() - ARC_RECT_SIZE);
            path.arcTo(arcRect, -90, 90);
            arcRect.moveTo(start.x() + xDiff/2 - RADIUS, start.y() + yOffset - 2*RADIUS);
            path.arcTo(arcRect, 0, 90);
            arcRect.moveTo(start.x() + xDiff/2 - RADIUS, start.y() + yOffset - 2*RADIUS);
            path.arcTo(arcRect, 90, 90);
            arcRect.moveTo(start.x() + xDiff/2 - RADIUS, end.y() - ARC_RECT_SIZE);
            path.arcTo(arcRect, 180, 90);
        }
    }
    else
    {
        // the connection points backward
        if(fabs(yDiff) > 4 * RADIUS)
        {            
            // start and end are so far from each other that they can
            // be connected directly with a line between the two operators
            if(yDiff > 0)
            {
                // the connection points downwards
                arcRect.moveTo(start.x() - RADIUS, start.y());
                path.arcTo(arcRect, 90, -90);
                arcRect.moveTo(start.x() - RADIUS, start.y() + yDiff/2 - ARC_RECT_SIZE);
                path.arcTo(arcRect, 0, -90);
                arcRect.moveTo(end.x() - RADIUS, start.y() + yDiff/2);
                path.arcTo(arcRect, 90, 90);
                arcRect.moveTo(end.x() - RADIUS, end.y() - ARC_RECT_SIZE);
                path.arcTo(arcRect, 180, 90);
            }
            else
            {
                // the connection points upwards
                arcRect.moveTo(start.x() - RADIUS, start.y() - ARC_RECT_SIZE);
                path.arcTo(arcRect, 270, 90);
                arcRect.moveTo(start.x() - RADIUS, start.y() + yDiff/2);
                path.arcTo(arcRect, 0, 90);
                arcRect.moveTo(end.x() - RADIUS, start.y() + yDiff/2 - ARC_RECT_SIZE);
                path.arcTo(arcRect, 270, -90);
                arcRect.moveTo(end.x() - RADIUS, end.y());
                path.arcTo(arcRect, 180, -90);
            }
        }
        else
        {
            // start and end are so close to each other
            // that the connection must run around one of the operators
            arcRect.moveTo(start.x() - RADIUS, start.y() - ARC_RECT_SIZE);
            path.arcTo(arcRect, -90, 90);
            arcRect.moveTo(start.x() - RADIUS, start.y() - ARC_RECT_SIZE + yOffset - EXTRA_HEIGHT);
            path.arcTo(arcRect, 0, 90);
            arcRect.moveTo(end.x() - RADIUS, start.y() - ARC_RECT_SIZE + yOffset - EXTRA_HEIGHT);
            path.arcTo(arcRect, 90, 90);
            arcRect.moveTo(end.x() - RADIUS, end.y() - ARC_RECT_SIZE);
            path.arcTo(arcRect, 180, 90);
        }
    }
    path.lineTo(end);
    
    return path;
}
Code Example #14
File: unmanagedReg.cpp Project: hcilab-um/tPad
void paperRegistration::detectFigures(cv::vector<cv::vector<cv::Point>>& squares, cv::vector<cv::vector<cv::Point>>& triangles,
	float minLength, float maxLength, int tresh_binary)
{
	if (currentDeviceImg.empty())
		return;
	
	//cv::Mat image = currentDeviceImg;
	//cv::Mat image = cv::imread("C:/Users/sophie/Desktop/meinz.png", CV_LOAD_IMAGE_GRAYSCALE);// cv::imread(path, CV_LOAD_IMAGE_GRAYSCALE);  
	//resize(image, image, cv::Size(500,700));

	squares.clear();  
	triangles.clear();	
	
	cv::Mat gray;
	cv::Mat element = getStructuringElement(cv::MORPH_RECT, cv::Size(7,7));
	cv::vector<cv::vector<cv::Point> > contours;

	//compute binary image
	//use dilatation and erosion to improve edges
	threshold(currentDeviceImg, gray, tresh_binary, 255, cv::THRESH_BINARY_INV);	
	dilate(gray, gray, element, cv::Point(-1,-1));
	erode(gray, gray, element, cv::Point(-1,-1));
	
	// find contours and store them all as a list
	cv::findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);
	
	//test each contour
	cv::vector<cv::Point> approx;
	cv::vector<cv::vector<cv::Point> >::iterator iterEnd = contours.end();
	for(cv::vector<cv::vector<cv::Point> >::iterator iter = contours.begin(); iter != iterEnd; ++iter)
	{
		// approximate contour with accuracy proportional
		// to the contour perimeter
		cv::approxPolyDP(*iter, approx, arcLength(*iter, true)*0.03, true);
	     
		//contours should be convex
		if (isContourConvex(approx))
		{
			// square contours should have 4 vertices after approximation and 
			// relatively large length (to filter out noisy contours)
			if( approx.size() == 4)
			{
				bool rectangular = true;	 
				for( int j = 3; j < 6; j++ )
				{
					// if cosines of all angles are small
					// (all angles are ~90 degree) then write
					// vertices to result
					 if (fabs(90 - fabs(computeAngle(approx[j%4], approx[j-3], approx[j-2]))) > 7)
					 {
						rectangular = false;
						break;
					 }
				}
				
				if (!rectangular)
					continue;
				
				float side1 = computeLength(approx[0], approx[1]);
				float side2 = computeLength(approx[1], approx[2]);
					
				if (side1 > minLength && side1 < maxLength && 
					side2 > minLength && side2 < maxLength)
					squares.push_back(approx);
			}		
			// triangle contours should have 3 vertices after approximation and 
			// relatively large length (to filter out noisy contours)
			else if ( approx.size() == 3)
			{
				float side1 = computeLength(approx[0], approx[1]);
				float side2 = computeLength(approx[1], approx[2]);
				float side3 = computeLength(approx[2], approx[0]);
				
				if (side1 > minLength && side1 < maxLength && 
					side2 > minLength && side2 < maxLength &&
					side3 > minLength && side3 < maxLength)
					triangles.push_back(approx);
			}
		}
	}
}
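The 90° ± 7° test above suggests that computeAngle(a, b, c) returns the interior angle at the middle vertex in degrees. A sketch under that assumption (the tPad code may compute it differently, e.g. via a signed atan2 difference):

#include <algorithm>
#include <cmath>
#include <opencv2/core.hpp>

// Hypothetical: angle at vertex b formed by the segments b->a and b->c, in degrees.
static float computeAngle(cv::Point a, cv::Point b, cv::Point c)
{
	double v1x = a.x - b.x, v1y = a.y - b.y;
	double v2x = c.x - b.x, v2y = c.y - b.y;
	double cosine = (v1x*v2x + v1y*v2y) /
		(std::sqrt(v1x*v1x + v1y*v1y) * std::sqrt(v2x*v2x + v2y*v2y) + 1e-10);
	cosine = std::max(-1.0, std::min(1.0, cosine));   // clamp for numerical safety
	return (float)(std::acos(cosine) * 180.0 / CV_PI);
}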
Code Example #15
File: vpMeEllipse.cpp Project: tswang/visp
void
vpMeEllipse::initTracking(const vpImage<unsigned char> &I, const unsigned int n,
			  unsigned *i, unsigned *j)
{
  vpCDEBUG(1) <<" begin vpMeEllipse::initTracking()"<<std::endl ;

  if (circle==false)
  {
    vpMatrix A(n,5) ;
    vpColVector b_(n) ;
    vpColVector x(5) ;

    // Build the linear system Ax = b
    //! i^2 + K0 j^2 + 2 K1 i j + 2 K2 i + 2 K3 j + K4
    // A = (j^2 2ij 2i 2j 1)   x = (K0 K1 K2 K3 K4)^T  b = (-i^2 )

    for (unsigned int k =0 ; k < n ; k++)
    {
      A[k][0] = vpMath::sqr(j[k]) ;
      A[k][1] = 2* i[k] * j[k] ;
      A[k][2] = 2* i[k] ;
      A[k][3] = 2* j[k] ;
      A[k][4] = 1 ;

      b_[k] = - vpMath::sqr(i[k]) ;
    }

    K = A.pseudoInverse(1e-26)*b_ ;
    std::cout << K << std::endl;
  }
  else
  {
    vpMatrix A(n,3) ;
    vpColVector b_(n) ;
    vpColVector x(3) ;

    vpColVector Kc(3) ;
    for (unsigned int k =0 ; k < n ; k++)
    {
      A[k][0] =  2* i[k] ;
      A[k][1] =  2* j[k] ;

      A[k][2] = 1 ;
      b_[k] = - vpMath::sqr(i[k]) - vpMath::sqr(j[k]) ;
    }

    Kc = A.pseudoInverse(1e-26)*b_ ;
    K[0] = 1 ;
    K[1] = 0 ;
    K[2] = Kc[0] ;
    K[3] = Kc[1] ;
    K[4] = Kc[2] ;

    std::cout << K << std::endl;
  }
  iP1.set_i( i[0] );
  iP1.set_j( j[0] );
  iP2.set_i( i[n-1] );
  iP2.set_j( j[n-1] );

  getParameters() ;
  computeAngle(iP1, iP2) ;
  display(I, vpColor::green) ;

  sample(I) ;

  //  2. Call the non-specific (generic) tracker initialization
  {
    vpMeTracker::initTracking(I) ;
  }

  try{
    track(I) ;
  }
  catch(...)
  {
    vpERROR_TRACE("Error caught") ;
    throw ;
  }
  vpMeTracker::display(I) ;
  vpDisplay::flush(I) ;

}
Code Example #16
File: Terrain.cpp Project: Aggroo/nebula-trifid
static bool buildSmoothNormals(
	udword nbTris, udword nbVerts,
	const Point* verts,
	const udword* dFaces, const uword* wFaces,
	Point* normals,
	bool flip)
	{
	// Sanity checks
	if(!verts || !normals || !nbTris || !nbVerts)	return false;

	// Allocate a scratch buffer for the per-face normals
	Point* FNormals = new Point[nbTris];
	if(!FNormals) return false;

	// Compute face normals
	udword c = (flip!=0);
	for(udword i=0;i<nbTris;i++)
		{
		udword Ref0 = dFaces ? dFaces[i*3+0]   : wFaces ? wFaces[i*3+0]   : 0;
		udword Ref1 = dFaces ? dFaces[i*3+1+c] : wFaces ? wFaces[i*3+1+c] : 1;
		udword Ref2 = dFaces ? dFaces[i*3+2-c] : wFaces ? wFaces[i*3+2-c] : 2;

		FNormals[i] = (verts[Ref2]-verts[Ref0])^(verts[Ref1] - verts[Ref0]);
		assert(!FNormals[i].IsZero());
		FNormals[i].Normalize();
		}

	// Compute vertex normals
	memset(normals, 0, nbVerts*sizeof(Point));

	Point* TmpNormals = new Point[nbVerts];
	memset(TmpNormals, 0, nbVerts*sizeof(Point));
	for(udword i=0;i<nbTris;i++)
		{
		udword Ref[3];
		Ref[0] = dFaces ? dFaces[i*3+0] : wFaces ? wFaces[i*3+0] : 0;
		Ref[1] = dFaces ? dFaces[i*3+1] : wFaces ? wFaces[i*3+1] : 1;
		Ref[2] = dFaces ? dFaces[i*3+2] : wFaces ? wFaces[i*3+2] : 2;

		for(udword j=0;j<3;j++)
			{
			if(TmpNormals[Ref[j]].IsZero())
				TmpNormals[Ref[j]] = FNormals[i];
			}
		}

	for(udword i=0;i<nbTris;i++)
		{
		udword Ref[3];
		Ref[0] = dFaces ? dFaces[i*3+0] : wFaces ? wFaces[i*3+0] : 0;
		Ref[1] = dFaces ? dFaces[i*3+1] : wFaces ? wFaces[i*3+1] : 1;
		Ref[2] = dFaces ? dFaces[i*3+2] : wFaces ? wFaces[i*3+2] : 2;

		normals[Ref[0]] += FNormals[i] * computeAngle(verts, Ref, Ref[0]);
		normals[Ref[1]] += FNormals[i] * computeAngle(verts, Ref, Ref[1]);
		normals[Ref[2]] += FNormals[i] * computeAngle(verts, Ref, Ref[2]);
		}

	// Normalize vertex normals
	for(udword i=0;i<nbVerts;i++)
		{
		if(normals[i].IsZero())
			normals[i] = TmpNormals[i];
		assert(!normals[i].IsZero());
		normals[i].Normalize();
		}

	DELETEARRAY(TmpNormals);
	DELETEARRAY(FNormals);

	return true;
	}
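The weighting calls computeAngle(verts, Ref, Ref[k]) implement angle-weighted vertex normals: each face normal contributes in proportion to the triangle's interior angle at that vertex, which makes the result independent of how the surface is triangulated. A sketch of such a helper, assuming Point exposes public x/y/z floats (the actual helper in this project may differ):

#include <cmath>

// Hypothetical: interior angle of triangle Ref[0..2] at the vertex with index 'vref', in radians.
static float computeAngle(const Point* verts, const udword* Ref, udword vref)
	{
	// Pick the two other vertices of the triangle
	udword v0 = (Ref[0] == vref) ? Ref[1] : Ref[0];
	udword v1 = (Ref[2] == vref) ? Ref[1] : Ref[2];

	// Edge vectors leaving the vertex of interest
	float e0x = verts[v0].x - verts[vref].x, e0y = verts[v0].y - verts[vref].y, e0z = verts[v0].z - verts[vref].z;
	float e1x = verts[v1].x - verts[vref].x, e1y = verts[v1].y - verts[vref].y, e1z = verts[v1].z - verts[vref].z;

	float dot = e0x*e1x + e0y*e1y + e0z*e1z;
	float n0  = std::sqrt(e0x*e0x + e0y*e0y + e0z*e0z);
	float n1  = std::sqrt(e1x*e1x + e1y*e1y + e1z*e1z);
	float c   = dot / (n0*n1);
	if (c >  1.0f) c =  1.0f;
	if (c < -1.0f) c = -1.0f;
	return std::acos(c);
	}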
Code Example #17
File: vpMeEllipse.cpp Project: tswang/visp
/*!
  Initialization of the tracking. The ellipse is defined thanks to the
  coordinates of n points.

  \warning It is better to use at least five points to estimate the K parameters well.

  \param I : Image in which the ellipse appears.
  \param n : The number of points in the list.
  \param iP : A pointer to a list of points belonging to the ellipse edge.
*/
void
vpMeEllipse::initTracking(const vpImage<unsigned char> &I, const unsigned int n,
			  vpImagePoint *iP)
{
  vpCDEBUG(1) <<" begin vpMeEllipse::initTracking()"<<std::endl ;

  if (circle==false)
  {
    vpMatrix A(n,5) ;
    vpColVector b_(n) ;
    vpColVector x(5) ;

    // Build the linear system Ax = b
    //! i^2 + K0 j^2 + 2 K1 i j + 2 K2 i + 2 K3 j + K4
    // A = (j^2 2ij 2i 2j 1)   x = (K0 K1 K2 K3 K4)^T  b = (-i^2 )

    for (unsigned int k =0 ; k < n ; k++)
    {
      A[k][0] = vpMath::sqr(iP[k].get_j()) ;
      A[k][1] = 2* iP[k].get_i() * iP[k].get_j() ;
      A[k][2] = 2* iP[k].get_i() ;
      A[k][3] = 2* iP[k].get_j() ;
      A[k][4] = 1 ;

      b_[k] = - vpMath::sqr(iP[k].get_i()) ;
    }

    K = A.pseudoInverse(1e-26)*b_ ;
    std::cout << K << std::endl;
  }
  else
  {
    vpMatrix A(n,3) ;
    vpColVector b_(n) ;
    vpColVector x(3) ;

    vpColVector Kc(3) ;
    for (unsigned int k =0 ; k < n ; k++)
    {
      A[k][0] =  2* iP[k].get_i() ;
      A[k][1] =  2* iP[k].get_j() ;
      A[k][2] = 1 ;

      b_[k] = - vpMath::sqr(iP[k].get_i()) - vpMath::sqr(iP[k].get_j()) ;
    }

    Kc = A.pseudoInverse(1e-26)*b_ ;
    K[0] = 1 ;
    K[1] = 0 ;
    K[2] = Kc[0] ;
    K[3] = Kc[1] ;
    K[4] = Kc[2] ;

    std::cout << K << std::endl;
  }

  iP1 = iP[0];
  iP2 = iP[n-1];

  getParameters() ;
  std::cout << "vpMeEllipse::initTracking() ellipse avant: " << iPc << " " << a << " " << b << " " << vpMath::deg(e) << " alpha: " << alpha1 << " " << alpha2 << std::endl;

  computeAngle(iP1, iP2) ;
  std::cout << "vpMeEllipse::initTracking() ellipse apres: " << iPc << " " << a << " " << b << " " << vpMath::deg(e) << " alpha: " << alpha1 << " " << alpha2 << std::endl;

  expecteddensity = (alpha2-alpha1) / vpMath::rad((double)me->getSampleStep());

  display(I, vpColor::green) ;
  sample(I) ;

  //  2. Call the non-specific (generic) tracker initialization
  {
    vpMeTracker::initTracking(I) ;
  }

  try{
    track(I) ;
  }
  catch(...)
  {
    vpERROR_TRACE("Error caught") ;
    throw ;
  }
  vpMeTracker::display(I) ;
  vpDisplay::flush(I) ;

}
Code Example #18
void update(){
  // set fake walk/rotation values
  fake_walk = walk;
  if(fake_walk > 0){ // we assume the robot can tell when it is actually standing still
    fake_walk -= max_walk_error;
    fake_walk += ((double)(rand())/(double)(RAND_MAX)) * 2 * max_walk_error;
    if(fake_walk < 0){fake_walk = 0;}
  }
  fake_rotation = rotation;
  fake_rotation -= max_rotation_error;
  fake_rotation += ((double)(rand())/(double)(RAND_MAX)) * 2 * max_rotation_error;
  

  // update robot position and rotation
  if(walk > 0){
    // modify robot position
    current_pose.x = current_pose.x + cos(current_pose.theta) * walk;
    current_pose.y = current_pose.y + sin(current_pose.theta) * walk;
    current_pose.theta += rotation;
    current_pose.theta = normalizeAngle(current_pose.theta);
    
    // modify robot position guess
    fake_current_pose.x = fake_current_pose.x + cos(fake_current_pose.theta) * fake_walk;
    fake_current_pose.y = fake_current_pose.y + sin(fake_current_pose.theta) * fake_walk;
    fake_current_pose.theta += fake_rotation;
    fake_current_pose.theta = normalizeAngle(fake_current_pose.theta);

    move_amount += walk;
    rotation_amount += d_abs(rotation);
  }
  
  // check capture triggers
  if(move_amount > move_trigger || rotation_amount > rotation_trigger){
    // std::cout << std::endl;
    // std::cout << "moved: " << move_amount << "; rotated: " << rotation_amount << "; ";
    // if (move_amount > move_trigger){
    //   std::cout << "move trigger; ";
    // }
    // if (rotation_amount > rotation_trigger){
    //   std::cout << "rotation trigger; ";
    // }
    // std::cout << std::endl;
    move_amount = 0;
    rotation_amount = 0;
    time_to_capture = true;
  }
  
  // eventually capture
  if(time_to_capture){
    // reset time_to_capture
    time_to_capture = false;
    
    // detect in-range landmarks
    for(unsigned int i=0; i<landmarks.size(); i++){

      // compute the distance between the landmark and the robot
      double dx = landmarks[i].x - current_pose.x;
      double dy = landmarks[i].y - current_pose.y;
      double distance = sqrt(pow(dx,2) + pow(dy,2));
      
      // check if the landmark is within the range
      if(distance < range){
	detected_landmarks.push_back(&(landmarks[i]));
	perceptions.push_back(computeAngle(&landmarks[i], &current_pose));
      }
    }
    
    // compute transformation matrix between last capture pose and current pose (and do the same for the robot guess)
    Eigen::Matrix3d m1 = r2m(last_captured_pose);
    Eigen::Matrix3d m2 = r2m(current_pose);
    Eigen::Matrix3d fm1 = r2m(fake_last_captured_pose);
    Eigen::Matrix3d fm2 = r2m(fake_current_pose);
    RobotPose transform = m2r(m1.inverse()*m2);
    RobotPose fake_transform = m2r(fm1.inverse()*fm2);
    
    // write stuff on files
    truth_out << transform.x << " " << transform.y << " " << transform.theta;
    noised_out << fake_transform.x << " " << fake_transform.y << " " << fake_transform.theta;
    
    for(unsigned int i = 0; i<perceptions.size(); i++){
      truth_out << " " << perceptions[i];
      noised_out << " " << perceptions[i] - max_perception_error + 2 * max_perception_error * ((double)rand())/((double)RAND_MAX);
    }
    truth_out << std::endl;
    noised_out << std::endl;
    
    // clear the vectors
    detected_landmarks.clear();
    perceptions.clear();
    
    // update last_captured_pose
    last_captured_pose = current_pose;
    fake_last_captured_pose = fake_current_pose;
  }
}
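Both this simulator and Code Example #12 obtain the expected bearing of a landmark from a robot pose via computeAngle(landmark, pose). A minimal sketch of that idea, matching the plain x/y/theta fields and the normalizeAngle helper used in this file; the type names Landmark and RobotPose are assumptions, and the real function may differ:

#include <math.h>

// Hypothetical: bearing at which the landmark should be observed from the given pose,
// i.e. the world-frame direction to the landmark minus the robot heading, normalized.
double computeAngle(Landmark* lm, RobotPose* pose){
  double world_angle = atan2(lm->y - pose->y, lm->x - pose->x);
  return normalizeAngle(world_angle - pose->theta);
}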