float ParticleFilterLocalizer::getMeasurementProbability(Particle* particle, const Measurements& measurements) {
    float probability = 1.0f;
    float expectedDistance;
    float expectedAngle;
    float measuredDistance;
    float measuredAngle;
    std::string landmarkName;
    Landmark* landmark;
    LandmarkMap::iterator landmarkSearch;

    for (Measurements::const_iterator it = measurements.begin(); it != measurements.end(); it++) {
        landmarkName = it->first;
        Measurement measurement = it->second;
        measuredDistance = measurement.distance;
        measuredAngle = measurement.angle;
        landmarkSearch = landmarks.find(landmarkName);

        if (landmarkSearch == landmarks.end()) {
            std::cout << "- Didnt find landmark '" << landmarkName << "', this should not happen" << std::endl;

            continue;
        }

        landmark = landmarkSearch->second;
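        // Expected range and bearing to the landmark, computed from this particle's hypothesized pose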
        expectedDistance = Math::distanceBetween(particle->x, particle->y, landmark->x, landmark->y);
        expectedAngle = Math::getAngleBetween(Math::Position(landmark->x, landmark->y), Math::Position(particle->x, particle->y), particle->orientation);

        // Weight the particle by a 75/25 blend of the distance and angle likelihoods under the Gaussian sensor-noise models
        probability *= Math::getGaussian(expectedDistance, distanceSenseNoise, measuredDistance) * 0.75
            + Math::getGaussian(expectedAngle, angleSenseNoise, measuredAngle) * 0.25;
    }

    return probability;
}
Example #2
void ManualController::updateParticleLocalizer(double dt) {
	Robot::Movement movement = robot->getMovement();
	Measurements measurements;

	Object* yellowGoal = vision->getLargestGoal(Side::YELLOW);
	Object* blueGoal = vision->getLargestGoal(Side::BLUE);
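	// Goal distance readings are clamped to [0, Config::fieldWidth] before being used as landmark measurements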

	if (yellowGoal != NULL) {
		measurements["yellow-center"] = Measurement(Math::min(Math::max(yellowGoal->distance, 0.0f), Config::fieldWidth), yellowGoal->angle);
	}

	if (blueGoal != NULL) {
		measurements["blue-center"] = Measurement(Math::min(Math::max(blueGoal->distance, 0.0f), Config::fieldWidth), blueGoal->angle);
	}

	//float velocityMagnitude = Math::Vector(movement.velocityX, movement.velocityY).getLength();

	//if (velocityMagnitude > 0.05 && measurements.size() > 0) {
		particleLocalizer.update(measurements);
		particleLocalizer.move(movement.velocityX, movement.velocityY, movement.omega, dt, measurements.size() == 0);
	//}

	Math::Position position = particleLocalizer.getPosition();
}
Example #3
void Robot::processMessage()
{
    unsigned int id = _currentMessage.getSenderId();
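    // Remember the highest message id seen from the master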
    if (id == MASTER_ID && _commId < _currentMessage.getMessageId())
    {
        _commId = _currentMessage.getMessageId();
    }

    if (_currentMessage.getType() == Message::MASTER)
    {
        _id = _currentMessage.getReceiverId();
        Logger::getInstance()->log("New ID aqcuired: " + std::to_string(_id), Logger::INFO);
    }

    if (_currentMessage.getType() == Message::MEASURE)
    {
        Measurements m;
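        // Decode each message parameter into a Sensor::SensorType for the current behaviour to measure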
        for (auto & s : _currentMessage.getParameters())
            m.push_back(static_cast<Sensor::SensorType> (transcode<int>(s)));

        _state->getBehaviour()->setMeasurements(m);
    }

    // Replace behaviour with a new one.
    if (_currentMessage.getType() == Message::BEHAVIOUR)
    {
        try
        {
            Logger::getInstance()->log("Obtaining Behaviour parameters...", Logger::VERBOSE);
            StringVector parameters = _currentMessage.getParameters();

            if (parameters.size() == 0)
            {
                // TODO Generate Event instead.

                Logger::getInstance()->log("Behaviour parameters incorrect!", Logger::WARNING);
                return;
            }

            _behaviourSet = false;
            unsigned int t = transcode<unsigned int>(parameters[0]);
            Behaviour::BehaviourType type = static_cast<Behaviour::BehaviourType> (t);

            // Predefined Behaviour
            if (type != Behaviour::CUSTOM)
            {
                Logger::getInstance()->log("Loading predefined Behaviour: " + std::to_string(type), Logger::INFO);
                SharedPtrBehaviour behaviour = generateBehaviour(type,
                        StringVector(parameters.begin() + 1, parameters.end()));
                _state->setBehaviour(behaviour);

                // Report back to the master whether the behaviour was created successfully
                if (_state->getBehaviour() == nullptr)
                {
                    send(Message(_id, MASTER_ID, 0, Message::NOT,{}));
                }
                else
                {
                    send(Message(_id, MASTER_ID, 0, Message::ACK,{}));
                }
            }
            else
            {
                Logger::getInstance()->log("Loading custom Behaviour...", Logger::VERBOSE);
                // Custom behaviour creation.
            }
        }
        catch (...)
        {
            // TODO

            Logger::getInstance()->log("Processing Behaviour failed!", Logger::ERROR);
        }
    }
}
Example #4
int track(string videopath){
    cout<<"start track "<<videopath<<endl;
    QString outpath=QString::fromStdString(videopath);
    int dotpos=outpath.indexOf('.');
    int strlen=outpath.length();
    outpath=outpath.replace(dotpos,strlen-dotpos,".infrared.track");
    cout<<"outpath is "<<outpath.toStdString()<<endl;

    ofstream out(outpath.toStdString());
    VideoCapture cap(videopath);
    if(!cap.isOpened())  // check if we succeeded
        return -1;

    Mat roi,fg,gray,bin;
    namedWindow("video input",CV_WINDOW_NORMAL);
    namedWindow("foreground mask",CV_WINDOW_NORMAL);
    int frameNum=0;

    BackgroundSubtractorSuBSENSE bgs;
    vector<vector<Point> > contours;
    vector<Vec4i> hierarchy;
    RNG rng(12345);
    for(;;frameNum++)
    {
        Mat frame;
        cap >> frame; // get a new frame from camera
        if(frame.empty()){
            break;
        }
        resize(frame,frame,Size(0,0),1.0/sqrt(speedUp),1.0/sqrt(speedUp));
        //init
        if(frameNum==0){
            fg.create(frame.size(),CV_8UC1);
            roi.create(frame.size(),CV_8UC1);
            roi.setTo(255);
            bgs.initialize(frame,roi);
            bin.create(frame.size(),CV_8UC1);
            cvtColor(frame,gray,CV_BGR2GRAY);
            threshold(gray,bin,thresh,255,THRESH_BINARY);
        }//detect
        else{
//            bgs(frame,fg,double(frameNum<=100));
            cvtColor(frame,gray,CV_BGR2GRAY);
            threshold(gray,bin,thresh,255,THRESH_BINARY);
            cout<<"thresh="<<thresh<<endl;
//            fg=fg|bin;
            fg=bin;
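            // Background subtraction is disabled above, so the foreground mask is simply the thresholded frame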

            //            vector<vector<Point> > contours0;
            Mat img=fg.clone();
            Mat kernel=cv::getStructuringElement(cv::MORPH_ELLIPSE,cv::Size(minObjectSize,minObjectSize));
            cv::morphologyEx(img,img,MORPH_OPEN,kernel);
            contours.clear();
            findContours( img, contours, hierarchy, CV_RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);

            //            contours.resize(contours0.size());
            //            for( size_t k = 0; k < contours0.size(); k++ )
            //                approxPolyDP(Mat(contours0[k]), contours[k], 3, true);

            /// Get the moments
            vector<Moments> mu(contours.size() );
            for( int i = 0; i < contours.size(); i++ )
            { mu[i] = moments( contours[i], false ); }

            ///  Get the mass centers:
            vector<Point2f> mc( contours.size() );
            Measurements m;
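            // Each contour centroid (computed from the image moments) becomes one measurement for the tracker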
            for( int i = 0; i < contours.size(); i++ )
            {
                mc[i] = Point2f( mu[i].m10/mu[i].m00 , mu[i].m01/mu[i].m00 );
//                mc[i][0]=mc[i][0]/img.rows;
//                mc[i][1]=mc[i][1]/img.cols;
                m.emplace_back(Measurement(mc[i].x,mc[i].y));
            }

            /// Draw contours
            Mat drawing = frame;
            for( int i = 0; i< contours.size(); i++ )
            {
                Scalar color = Scalar( rng.uniform(0, 255), rng.uniform(0,255), rng.uniform(0,255) );
                drawContours( drawing, contours, i, color, 2, 8, hierarchy, 0, Point() );
                circle( drawing, mc[i], 4, color, -1, 8, 0 );
            }

            for ( const auto & x : m )
            {
                const int r = 5;
                // draw measurement
                circle(drawing, Point(x(0), x(1)), 2*r, Scalar(0, 0, 0));
                line(drawing, Point(x(0) - r, x(1) - r), Point(x(0) + r, x(1) + r), Scalar(0, 0, 0) );
                line(drawing, Point(x(0) - r, x(1) + r), Point(x(0) + r, x(1) - r), Scalar(0, 0, 0) );
            }

            tracker.track(m);

            tracklist.update(tracker,out,frameNum);
            for ( const auto & filter : tracker.filters() )
            {

                const GNNTracker::StateSpaceVector s = filter.state();
                const GNNTracker::StateSpaceVector p = filter.prediction();

                // draw filter position
                circle(drawing, Point(s(0), s(1)), 5, Scalar(0, 0, 255));

                // draw filter prediction
                circle(drawing, Point(p(0), p(1)), 5, Scalar(255, 0, 0));

                // draw filter velocity (scaled)
                line(drawing, Point(s(0), s(1)), Point(s(0) + 5 * s(2), s(1) + 5 * s(3)), Scalar(0, 255, 0));

                stringstream id;

                id << filter.id();

                putText(drawing, id.str(), cv::Point2i(s(0) + 10, s(1)), cv::FONT_HERSHEY_PLAIN, 1, cv::Scalar(255, 0, 255));

            }

            imshow("video input", drawing);
            imshow("foreground mask",fg);
            imshow("bin",bin);

        }

        if(frameNum<=100){
            cout<<"init "<<videopath<<" frameNum="<<frameNum<<endl;
        }
        else{
            cout<<"track "<<outpath.toStdString()<<" frameNum="<<frameNum<<endl;
        }

        if(waitKey(30) >= 0) break;
    }

    out.close();
    return 0;
}
Example #5
int main()
{
    std::cout << "###############################" << std::endl;
    std::cout << "press q to quit" << std::endl;
    std::cout << "left click and hold mouse on image to create additional measurement" << std::endl;

    // 4-dimensional state, 2-dimensional measurements
    typedef Tracker<4, 2> GNNTracker;
    GNNTracker tracker;

    typedef GNNTracker::MeasurementSpaceVector Measurement;
    typedef GNNTracker::Measurements Measurements;

    namedWindow( "Kalman Demo", CV_WINDOW_AUTOSIZE );

    int * mouse = new int[2]();
    mouse[0] = mouse[1] = -1;
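    // -1 marks "no mouse measurement yet"; CallBackFunc is expected to fill in the click coordinates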

    setMouseCallback("Kalman Demo",CallBackFunc, mouse);

    int k = 0;
    size_t step = 0;

    while ( k != 1048689 ) //'q'
    {
        Mat img(480, 640, CV_8UC3, Scalar(255, 255, 255));

        Measurements m;

        // build some virtual measurements
        m.emplace_back(Measurement(320 + 120 * std::sin(step * .05), 240 + 100 * std::cos(step * .05)));
        m.emplace_back(Measurement(240 - 120 * std::sin(step * .05), 240 + 100 * std::cos(step * .05)));
        m.emplace_back(Measurement(320 - 120 * std::sin(step * .05), 240 - 200 * std::cos(step * .05)));

        // get measurement from mouse coordinates
        if (mouse[0] != -1 && mouse[1] != -1)
            m.emplace_back(Measurement(mouse[0], mouse[1]));

        step++;

        for ( const auto & x : m )
        {
            const int r = 5;
            // draw measurement
            circle(img, Point(x(0), x(1)), 2*r, Scalar(0, 0, 0));
            line(img, Point(x(0) - r, x(1) - r), Point(x(0) + r, x(1) + r), Scalar(0, 0, 0) );
            line(img, Point(x(0) - r, x(1) + r), Point(x(0) + r, x(1) - r), Scalar(0, 0, 0) );
        }

        tracker.track(m);

        for ( const auto & filter : tracker.filters() )
        {

            const GNNTracker::StateSpaceVector s = filter.state();
            const GNNTracker::StateSpaceVector p = filter.prediction();

            // draw filter position
            circle(img, Point(s(0), s(1)), 5, Scalar(0, 0, 255));

            // draw filter prediction
            circle(img, Point(p(0), p(1)), 5, Scalar(255, 0, 0));

            // draw filter velocity (scaled)
            line(img, Point(s(0), s(1)), Point(s(0) + 5 * s(2), s(1) + 5 * s(3)), Scalar(0, 255, 0));

            stringstream id;

            id << filter.id();

            putText(img, id.str(), cv::Point2i(s(0) + 10, s(1)), cv::FONT_HERSHEY_PLAIN, 1, cv::Scalar(0, 0, 0));

        }

        imshow( "Kalman Demo", img );

        k = cv::waitKey(30);
    }
    delete[] mouse; // free the callback's user data
    return 0;
}