// karelerin icine rakamlari ve kabul/iptal tuslarini basar
void DrawScreen::drawNumbers(){
	drawButtonArea();
	// draw 1
	if (keyTurn == 1)
		putText(frames, intToString(1), cv::Point(116, 140), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(1), cv::Point(119, 143), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(1), cv::Point(116, 140), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 2
	if (keyTurn == 2)
		putText(frames, intToString(2), cv::Point(216, 140), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(2), cv::Point(219, 143), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(2), cv::Point(216, 140), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 3
	if (keyTurn == 3)
		putText(frames, intToString(3), cv::Point(316, 140), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(3), cv::Point(319, 143), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(3), cv::Point(316, 140), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 4
	if (keyTurn == 4)
		putText(frames, intToString(4), cv::Point(116, 240), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(4), cv::Point(119, 243), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(4), cv::Point(116, 240), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 5
	if (keyTurn == 5)
		putText(frames, intToString(5), cv::Point(216, 240), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(5), cv::Point(219, 243), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(5), cv::Point(216, 240), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 6
	if (keyTurn == 6)
		putText(frames, intToString(6), cv::Point(316, 240), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(6), cv::Point(319, 243), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(6), cv::Point(316, 240), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 7
	if (keyTurn == 7)
		putText(frames, intToString(7), cv::Point(116, 340), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(7), cv::Point(119, 343), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(7), cv::Point(116, 340), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 8
	if (keyTurn == 8)
		putText(frames, intToString(8), cv::Point(216, 340), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(8), cv::Point(219, 343), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(8), cv::Point(216, 340), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 9
	if (keyTurn == 9)
		putText(frames, intToString(9), cv::Point(316, 340), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(9), cv::Point(319, 343), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(9), cv::Point(316, 340), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw 0
	if (keyTurn == 11)
		putText(frames, intToString(0), cv::Point(216, 440), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 3);
	else{
		putText(frames, intToString(0), cv::Point(219, 443), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, SHADOW_COLOR, 3);
		putText(frames, intToString(0), cv::Point(216, 440), cv::FONT_HERSHEY_COMPLEX_SMALL, 3, NUMBER_COLOR, 2);
	}
	// draw right	
	if (keyTurn == 12){
		line(frames, cv::Point(335, 440), cv::Point(350, 405), NUMBER_COLOR, 4, 8, 0);
		line(frames, cv::Point(320, 420), cv::Point(335, 440), NUMBER_COLOR, 4, 8, 0);
	}
	else{
		line(frames, cv::Point(338, 443), cv::Point(353, 408), SHADOW_COLOR, 3, 8, 0);
		line(frames, cv::Point(323, 423), cv::Point(338, 443), SHADOW_COLOR, 3, 8, 0);
		line(frames, cv::Point(335, 440), cv::Point(350, 405), NUMBER_COLOR, 3, 8, 0);
		line(frames, cv::Point(320, 420), cv::Point(335, 440), NUMBER_COLOR, 3, 8, 0);
	}
	// draw false
	if (keyTurn == 10){
		line(frames, cv::Point(120, 405), cv::Point(150, 435), NUMBER_COLOR, 4, 8, 0);
		line(frames, cv::Point(150, 405), cv::Point(120, 435), NUMBER_COLOR, 4, 8, 0);
	}
	else{
		line(frames, cv::Point(123, 408), cv::Point(153, 438), SHADOW_COLOR, 3, 8, 0);
		line(frames, cv::Point(153, 408), cv::Point(123, 438), SHADOW_COLOR, 3, 8, 0);
		line(frames, cv::Point(120, 405), cv::Point(150, 435), NUMBER_COLOR, 3, 8, 0);
		line(frames, cv::Point(150, 405), cv::Point(120, 435), NUMBER_COLOR, 3, 8, 0);
	}
}
Example #2
0
// Convenience wrapper: renders `text` through the generic putText overload
// with the trailing flag set to true (monospace rendering, presumably —
// confirm against putText's declaration).
void Dashboard::putMonoText(int x, int y, const string& text, int size, SDLColor color)
{
    const char* raw = text.c_str();
    putText(x, y, raw, size, color, true);
}
// Updates per-camera entry/exit counters and per-room population by comparing
// the current frame against the previous one, using the door mask to decide
// which object positions count as a door crossing. Also draws debug counters
// onto each camera's "debugImage". Does nothing on the very first frame
// (no previous frame to diff against).
void EntryExitCounter::process(FrameList &frames)
{
    if(frames.hasPrevious())
    {
        for(unsigned int n = 0; n < frames.getCurrent().getCameras().size(); n++)
        {
            if(frames.hasDoorMask())
            {
                CameraObject  *cameraCurr = &frames.getCurrent().getCameras()[n];
                CameraObject  *cameraPrev = &frames.getPrevious().getCameras()[n];
                cameraCurr->setEntered(cameraPrev->getEntered()); //Get data from last frame
                cameraCurr->setExited(cameraPrev->getExited());   //Get data from last frame
                cv::Mat doorMask = frames.getDoorMask(); //Get the door mask

                // Objects that disappeared this frame ("transitionary"): if an
                // object's exit point lies inside the door polygon and it passed
                // all three mask checks, count it as having exited the room.
                for(std::vector<Object>::iterator object = cameraCurr->getTransitionaryObjects().begin(); object != cameraCurr->getTransitionaryObjects().end(); object++)
                {
                    cv::Point2d pos = object->exitPoint;
                    if(isInsidePolygon(doorMask, pos) && object->hasPassedMasksOne && object->hasPassedMasksTwo && object->hasPassedMasksThree)
                    {
                        cameraCurr->setExited(cameraCurr->getExited()+1);
                    }
                }
                // Transitionary objects are consumed once counted.
                cameraCurr->getTransitionaryObjects().clear();

                // Live objects: count an entry at most once per object
                // (hasAlreadyEntered latch) when its entry point is inside the
                // door polygon and it passed all three masks.
                for(std::vector<Object>::iterator object = cameraCurr->getObjects().begin(); object != cameraCurr->getObjects().end(); object++)
                {
                    cv::Point2d entryPosition = object->entryPoint;
                    if(isInsidePolygon(doorMask, entryPosition) && object->hasPassedMasksOne && object->hasPassedMasksTwo && object->hasPassedMasksThree && !object->hasAlreadyEntered)
                    {
                        cameraCurr->setEntered(cameraCurr->getEntered()+1);
                        object->hasAlreadyEntered = true;
                    }
                }

                //Set population for a specific RoomID corresponding to the current camera.
                // population(now) = population(prev) + entered-this-frame - exited-this-frame
                std::string currentRoomID = frames.getCurrent().getCameras()[n].getRoomID();
                int exitedThisFrame = cameraCurr->getExited()-cameraPrev->getExited();
                int enteredThisFrame =  cameraCurr->getEntered()-cameraPrev->getEntered();
                int prevPopulation = frames.getPrevious().getPopulationInRoomID(currentRoomID);
                frames.getCurrent().setPopulationInRoomID(prevPopulation+enteredThisFrame-exitedThisFrame, currentRoomID);


                //------------------ Debug writes nr of people that enters/exits into debugImage ------------------//
                if(!cameraCurr->hasImage("debugImage"))
                    cameraCurr->addImage("debugImage", cameraCurr->getImage("rawImage").clone());
                cv::Mat debugImage = cameraCurr->getImage("debugImage");
                std::string text = "";
                std::string text2 = "";
                int fontFace = cv::FONT_HERSHEY_PLAIN;
                double fontScale = 1;
                int thickness = 1;
                cv::Point2d pos1(10,20);
                cv::Point2d pos2(10,40);
                text = "Entered: " + std::to_string(cameraCurr->getEntered());
                putText(debugImage, text, pos1, fontFace, fontScale, cv::Scalar(0,255,0), thickness, 8);
                text2 = "Exited: " + std::to_string(cameraCurr->getExited());
                putText(debugImage, text2, pos2, fontFace, fontScale, cv::Scalar(0,255,0), thickness, 8);
                //------------------------------------------------------------------------------------------------//
            }
        }

        /* Sum all room populations into one. Since roomId's are always different from each other,
           totalPopulation is really a debug variable that now is just printed. Works only
           for one camera at the moment.*/
        totalPopulation = 0;
        for(unsigned int n = 0; n < frames.getCurrent().getCameras().size(); n++) {
            std::string currentRoomID = frames.getCurrent().getCameras()[n].getRoomID();
            totalPopulation = totalPopulation + frames.getCurrent().getPopulationInRoomID(currentRoomID);
        }
        //--------------------------------- Debug, writes population to debugImage --------------------------------//
        // NOTE(review): this copies the whole camera vector, and the text below
        // is drawn into the copy's debugImage. It only reaches the real frame if
        // getImage returns a cv::Mat sharing the same underlying pixel buffer —
        // verify (the commented-out line suggests the original non-copy intent).
        std::vector<CameraObject> cameras = frames.getCurrent().getCameras();
        if(cameras.size() > 0){
            //CameraObject  *cameraCurr = &frames.getCurrent().getCameras()[0];
            CameraObject  *cameraCurr = &cameras[0];
            std::string text = "";
            int fontFace = cv::FONT_HERSHEY_PLAIN;
            double fontScale = 1;
            text = "Is inside: " + std::to_string(totalPopulation);
            cv::Point2d pos3(10,60);
            cv::Mat debugImage = cameraCurr->getImage("debugImage");
            putText(debugImage, text, pos3, fontFace, fontScale, cv::Scalar(0,255,0), 1, 8);
        }
        //--------------------------------------------------------------------------------------------------------//
    }
}
Example #4
0
// Processes one video frame of the person-tracking pipeline: cleans up the
// foreground mask, extracts contours, associates contours with the existing
// Kalman-tracked objects, creates new tracks / retires finished ones, and
// updates the in/out person-flow counters. Always returns 0.
// @param frame    BGR input frame; also used as the drawing canvas when with_gui is set
// @param fg_mask  foreground mask from background subtraction (modified in place)
// @param tick     current tick count; dT (seconds) is derived from the previous call's tick
int ProcessFrame(cv::Mat *frame, cv::Mat *fg_mask, double tick) {

    double dT = ((tick - prev_tick ) / cv::getTickFrequency()); //seconds
    prev_tick = tick;
    if(with_fps) {
        printf("FPS ticks : %f\n", (float) 1 / dT);
    }
    // HSV copy kept for the (currently disabled) histogram computations below.
    cv::Mat hsv;
    cvtColor(*frame, hsv, CV_BGR2HSV);

    // Morphological open-ish cleanup: erode removes speckle noise, dilate
    // restores / merges the remaining blobs.
    cv::erode(*fg_mask, *fg_mask, cv::Mat(), cv::Point(-1, -1), 5);
    cv::dilate(*fg_mask, *fg_mask, cv::Mat(), cv::Point(-1, -1), 8);

    if(with_gui) {
        cv::imshow("Threshold", *fg_mask);
    }
    vector<vector<cv::Point>> contours;
    cv::findContours(*fg_mask, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);

    // Keep only contours whose bounding box reaches min_area; each kept
    // contour gets a sequential id and its image moments.
    vector<contour_t> found_contures;
    contour_t new_contour;
    int counter = 0;

    for (size_t i = 0; i < contours.size(); i++) {
        cv::Rect bBox;
        cv::Moments mu;
        bBox = cv::boundingRect(contours[i]);
        mu = moments( contours[i], false);

        if (bBox.area() >= min_area) {
            new_contour.id = counter;
            new_contour.contours = contours[i];
            new_contour.mu = mu;
            new_contour.contour_use = false;
            counter++;
            found_contures.push_back(new_contour);
        }
    }

    loadValidCounureToObject(found_contures, tracked_objects);                             // load all object-to-contour distances and order them

    std::sort(tracked_objects.begin(),tracked_objects.end(),comp);
    for (size_t i = 0; i < tracked_objects.size(); i++) {
        tracked_objects[i].set_index_object((int) i);
        if (tracked_objects[i].selected_counture.size() >= 1){                           // if the object has some contours in its neighborhood
            found_contures[tracked_objects[i].selected_counture[0].ID].candidate_object.push_back(tracked_objects[i]);    // push the object's ID into that contour's candidate list
        }
    }
    // Resolve each tracked object against the contours. Objects may be erased
    // inside this loop; the i-- / continue pattern keeps the index consistent.
    for (size_t i = 0; i < tracked_objects.size(); i++) {


        int contourID = parsingContours(found_contures, tracked_objects[i]);

        if (contourID == -1){
            // No contour matched this object this frame.
            if (tracked_objects[i].counter() < 2) {
                // Too short-lived to be a real track: drop it.
                tracked_objects.erase(tracked_objects.begin() + i);
                i--;
                continue;
            }
            else {
                // Inside the central band of the frame: keep predicting with Kalman only.
                if (!(tracked_objects[i].last_y_pos() > frame_height - frame_height / 6 ||
                        tracked_objects[i].last_y_pos() < frame_height / 6)) {
                    tracked_objects[i].kalmanMakeCalculate(*frame, dT);
                }
                else {
                    // Near the top/bottom edge: if the object is still heading away
                    // from where it started (and hasn't had its start repositioned),
                    // keep predicting; otherwise count it as a completed crossing
                    // and remove the track.
                    if (((tracked_objects[i].starting_y_pos() < frame_height / 2 &&
                            tracked_objects[i].last_y_pos() <  frame_height / 6 ) ||
                            (tracked_objects[i].starting_y_pos() > frame_height / 2 &&
                                    tracked_objects[i].last_y_pos() > frame_height - frame_height / 6)) &&
                            !(tracked_objects[i].change_startin_pos())) {

                        tracked_objects[i].kalmanMakeCalculate(*frame, dT);
                    }
                    else {
                        counterAbsPersonFlow((int) i);
                        tracked_objects.erase(tracked_objects.begin() + i);
                        i--;
                        continue;
                    }

                }
            }
            // Track has been coasting without a contour match for too long:
            // count it and retire it.
            if (tracked_objects[i].get_usingRate() > 30) {
                counterAbsPersonFlow((int) i);
                tracked_objects.erase(tracked_objects.begin() + i);
                i--;
                continue;
            }
        }
        else{
            // Contour matched: mark it used and update the Kalman filter with
            // the contour's centroid (histogram matching currently disabled).
            found_contures[contourID].contour_use = true;
            cv::MatND hist;// = CalcHistogramBase(hsv, found_contures[contourID].contours, contourID, tracked_objects[i].hist());
            tracked_objects[i].kalmanMakeCalculate(*frame, found_contures[contourID].mu, dT, hist);
            // Crossed from the top half down past the bottom quarter:
            // count the flow and restart the track from the bottom edge.
            if (tracked_objects[i].starting_y_pos() < frame_height / 2 && tracked_objects[i].last_y_pos() > frame_height - frame_height / 4 ){
                if (counterAbsPersonFlow((int) i) == 0) {
                    tracked_objects[i].set_startingYpos(frame_height);
                    tracked_objects[i].set_change_startin_pos(true);
                }
            }
            // Symmetric case: crossed from the bottom half up past the top quarter.
            if (tracked_objects[i].starting_y_pos() > frame_height / 2 && tracked_objects[i].last_y_pos() < frame_height / 4 ){
                if(counterAbsPersonFlow((int) i) == 0) {
                    tracked_objects[i].set_startingYpos(0);
                    tracked_objects[i].set_change_startin_pos(true);
                }
            }
        }
    }

    // Any contour still unused may spawn a new track — but only if its centroid
    // is not too close to an existing tracked object.
    for (size_t i = 0; i < found_contures.size(); i++) {
        if (!found_contures[i].contour_use) {

            bool create = true;
            // Centroid from raw moments: (m10/m00, m01/m00).
            double x = found_contures[i].mu.m10 / found_contures[i].mu.m00;
            double y = found_contures[i].mu.m01 / found_contures[i].mu.m00;


            for (size_t k = 0; k < tracked_objects.size(); k++) {
                double distance = CalcDistance(x, tracked_objects[k].last_x_pos(), y, tracked_objects[k].last_y_pos());
                if (min_dist_to_create > distance) {
                    create = false;
                }
            }
            if (create) {
                kalmanCont newObject;
                newObject.set_id(id);
                newObject.set_startingYpos(y);
                newObject.set_startingXpos(x);

                cv::MatND hist;// = CalcHistogramContour(hsv, found_contures[i].contours, (int) i);
                newObject.kalmanMakeCalculate(*frame, found_contures[i].mu, dT, hist);

                tracked_objects.push_back(newObject);
                id++;
                // ids cycle through 0..10
                id = (id > 10) ? 0 : id;
            }
        }
        found_contures[i].contour_use = false;
    }
    // Per-frame bookkeeping for every surviving track, plus GUI overlay.
    for (size_t i = 0; i < tracked_objects.size(); i++) {

        tracked_objects[i].add_usingRate();
        tracked_objects[i].set_counter();
        tracked_objects[i].clear_history_frams();
        if (with_gui) {
            cv::Point center;
            center.x = (int) tracked_objects[i].last_x_pos();
            center.y = (int) tracked_objects[i].last_y_pos();
            cv::circle(*frame, center, 2, CV_RGB(tracked_objects[i].R, tracked_objects[i].G, tracked_objects[i].B), -1);
            stringstream sstr;
            sstr << "Objekt" << tracked_objects[i].id();
            cv::putText(*frame, sstr.str(), cv::Point(center.x + 3, center.y - 3), cv::FONT_HERSHEY_SIMPLEX, 0.5,
                            CV_RGB(tracked_objects[i].R, tracked_objects[i].G, tracked_objects[i].B), 2);
        }
    }
    if (with_gui) {
        // Draw the out-counter (top-left, green) and in-counter (bottom-left, red).
        stringstream ss;
        ss << out;
        string counter1 = ss.str();
        putText(*frame, counter1.c_str(), cv::Point(5, 30), FONT_HERSHEY_SCRIPT_SIMPLEX, 1, cv::Scalar(0, 255, 0),1);

        stringstream ss2;
        ss2 << in;
        string counter2 = ss2.str();
        putText(*frame, counter2.c_str(), cv::Point(5, frame_height - 30), FONT_HERSHEY_SCRIPT_SIMPLEX, 1, cv::Scalar(0, 0, 255),1);
        cv::imshow("Tracking", *frame);
    }
    if (!with_gui){
       // printf("in: %d, out: %d\n",in,out);
    }
    return 0;

}
/*****************************************************************************
 // The knn matching with k = 2
 // This code performs the matching and the refinement.
 // @param query_image: the input image
 // @param matches_out: a pointer that stores the output matches. It is necessary for
 //                     pose estimation.
 // @return the index of the best-matching reference image, or -1 when no
 //         match could be established (no keypoints found, or no reference
 //         image received any consistent hit).
 */
int knn_match(cv::Mat& query_image,  std::vector< cv::DMatch> * matches_out)
{
    // variables that keep the query keypoints and query descriptors
    std::vector<cv::KeyPoint>           keypointsQuery;
    cv::Mat                             descriptorQuery;
    
    // Temporary variables for the matching results
    std::vector< std::vector< cv::DMatch> > matches1;
    std::vector< std::vector< cv::DMatch> > matches2;
    std::vector< std::vector< cv::DMatch> > matches_opt1;
    
    
    //////////////////////////////////////////////////////////////////////
    // 1. Detect the keypoints
    // This line detects keypoints in the query image
    _detector->detect(query_image, keypointsQuery);
    
    // Without keypoints there is nothing to match: bail out early instead of
    // feeding an empty descriptor matrix into knnMatch (which asserts/crashes
    // inside OpenCV).
    if (keypointsQuery.empty())
    {
        query_image.release();
        return -1;
    }
    
    // extract descriptors
    _extractor->compute( query_image, keypointsQuery, descriptorQuery);
    
    //////////////////////////////////////////////////////////////////////////////
    // 2. Here we match the descriptors with the database descriptors.
    // with k-nearest neighbors with k=2
    _matcher.knnMatch(descriptorQuery , matches1, 2);
    
#ifdef DEBUG_OUT
    std::cout << "Found " << matches1.size() << " matching feature descriptors out of " << _matcher.getTrainDescriptors().size() << " database descriptors."  << std::endl;
#endif
    
    
    //////////////////////////////////////////////////////////////////////////////
    // 3 Filter the matches.
    // Accept only matches (knn with k=2) whose both neighbors belong to one image.
    // The database tree within _matcher contains descriptors of all input images.
    // We expect that both nearest neighbors must belong to one image.
    // Otherwise we can remove the result.
    // Along with this, we count which reference image has the highest number of
    // matches. At this time, we consider this image as the searched image.
    
    // init the per-reference-image hit counters with 0
    std::vector<int> hits(_num_ref_images, 0);
    
    // The loop runs through all matches and compares the image index imgIdx.
    // They must be equal, otherwise the two neighbors belong to two different
    // reference images.
    for (size_t i = 0; i < matches1.size(); i++)
    {
        // knnMatch may return fewer than 2 neighbors for a descriptor
        // (e.g. a very small database); skip those to avoid an
        // out-of-range at(1).
        if (matches1[i].size() < 2)
            continue;
        // The comparison.
        if(matches1[i].at(0).imgIdx == matches1[i].at(1).imgIdx)
        {
            // we keep it
            matches_opt1.push_back(matches1[i]);
            // and count a hit
            hits[matches1[i].at(0).imgIdx]++;
        }
    }
    
#ifdef DEBUG_OUT
    std::cout << "Optimized " << matches_opt1.size() << " feature descriptors." << std::endl;
#endif
    
    // Now we search for the highest number of hits in our hit array
    // The variable max_idx keeps the image id.
    // The variable max_value the amount of hits.
    int max_idx = -1;
    int max_value = 0;
    for (int i=0; i<_num_ref_images; i++)
    {
#ifdef DEBUG_OUT
        std::cout << "for " << i << " : " << hits[i] << std::endl;
#endif
        if(hits[i]  > max_value)
        {
            max_value = hits[i];
            max_idx = i;
        }
    }
    
    // If no reference image received a single hit, max_idx is still -1 and
    // indexing _descriptorsRefDB[max_idx] below would be out-of-bounds UB.
    if (max_idx == -1)
    {
        query_image.release();
        return -1;
    }
    
    ///////////////////////////////////////////////////////
    // 4. The cross-match
    // At this time, we test the database against the query descriptors.
    // The variable max_idx stores the reference image id. Thus, we test only
    // the descriptors that belong to max_idx against the query descriptors.
    _matcher.knnMatch(_descriptorsRefDB[max_idx], descriptorQuery, matches2, 2);
    
    
    ///////////////////////////////////////////////////////
    // 5. Refinement; Ratio test
    // The ratio test only accepts matches which are clear without ambiguity.
    // The best hit must be closer to the query descriptors than the second hit.
    int removed = ratioTest(matches_opt1);
#ifdef DEBUG_OUT
    std::cout << "Removed " << removed << " matched."  << std::endl;
#endif
    
    removed = ratioTest(matches2);
#ifdef DEBUG_OUT
    std::cout << "Removed " << removed << " matched."  << std::endl;
#endif
    
    ///////////////////////////////////////////////////////
    // 6. Refinement; Symmetry test
    // We only accept matches which appear in both knn-matches.
    // It should not matter whether we test the database against the query descriptors
    // or the query descriptors against the database.
    // If we do not find the same solution in both directions, we toss the match.
    std::vector<cv::DMatch> symMatches;
    symmetryTest(  matches_opt1, matches2, symMatches);
#ifdef DEBUG_OUT
    std::cout << "Kept " << symMatches.size() << " matches after symmetry test."  << std::endl;
#endif
    
    ///////////////////////////////////////////////////////
    // 7. Refinement; Epipolar constraint
    // We perform an epipolar test using the RANSAC method.
    // NOTE(review): when symMatches.size() <= 25, matches_out keeps whatever
    // the caller passed in from a previous run — verify that is intended
    // before drawMatches consumes it below.
    if(symMatches.size() > 25)
    {
        matches_out->clear();
        ransacTest( symMatches,  _keypointsRefDB[max_idx], keypointsQuery, *matches_out);
        
        
    }
    
#ifdef DEBUG_OUT
    std::cout << "Kept " << matches_out->size() << " matches after RANSAC test."  << std::endl;
#endif
    
    ///////////////////////////////////////////////////////
    // 8.  Draw this image on screen.
    cv::Mat out;
    cv::drawMatches(feature_map_database[max_idx]._ref_image , _keypointsRefDB[max_idx], query_image, keypointsQuery, *matches_out, out, cv::Scalar(255,255,255), cv::Scalar(0,0,255));
    
    // Compose the status line shown in the result window.
    std::string num_matches_str;
    std::strstream conv;
    conv << matches_out->size();
    conv >> num_matches_str;
    
    std::string text;
    text.append( num_matches_str);
    text.append("( " + _num_ref_features_in_db_str + " total)");
    text.append(" matches were found in reference image ");
    text.append( feature_map_database[max_idx]._ref_image_str);
    
    putText(out, text, cvPoint(20,20),
            cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cvScalar(0,255,255), 1, CV_AA);
    
    cv::imshow("result", out);
    if (run_video) cv::waitKey(1);
    else cv::waitKey();
    
    
    
    // Delete the images
    query_image.release();
    out.release();
    
    
    
    return max_idx;
    
}
Example #6
0
// Main capture loop: tracks a colored marker (HSV range low_boundry..high_boundry)
// in the webcam feed, maps its position to a cursor constrained by a maze image,
// forwards the cursor to the OS (SetCursorPos) to drive the "Chicken Invaders 5"
// game window, and clicks when two markers are visible.
// NOTE(review): the function's closing brace is not visible in this chunk —
// the body below is reproduced as-is.
void camera_feed()
{
	VideoCapture cap(0);
	if (cap.isOpened())
	{
		int distance[3], MUL = 1, dif = 0;
		char key;
		bool first_run = false, is_size_checked = false, moved = false, shoot = false;
		unsigned long max_contours_amount = 0;
		Point drawing_point, cursor, additional_point;
		vector<vector<Point>> contours, main_points;
		vector<Point> pen1, pen2, pens;
		vector<Vec4i> hierarchy;
		Mat frame, real_pic, drawing_frame, maze;
		// HSV range for the tracked marker (greenish), and a generic gray color.
		Scalar low_boundry(45, 107, 52), high_boundry(86, 227, 160), color(100, 100, 100);
		//namedWindow("drawing_frame", 1);
		//namedWindow("frame", 1);
		cap >> frame;
		cursor = Point(20, 20);
		maze = imread("maze1.jpg");
		// Quantize the maze to pure black/white (integer divide then multiply),
		// then invert so walls/paths swap — presumably WHITE is 255; confirm.
		maze = maze / WHITE;
		maze = maze * WHITE;
		bitwise_not(maze, maze);
		
		

		RECT rect = { 0 }; // gaming stuff!
		HWND window = FindWindow("Chicken Invaders 5", "Chicken Invaders 5");
		Sleep(2000);
		if (window)
		{
			// Bring the game window to the foreground so cursor moves reach it.
			GetClientRect(window, &rect);
			SetForegroundWindow(window);
			SetActiveWindow(window);
			SetFocus(window);
		}

		while (true)
		{
			shoot = false;
			cap >> frame;
			real_pic = frame.clone();
			// Empty main_points from the previous iteration.
			while (main_points.size() != 0)
			{
				main_points.pop_back();
			}
			if (!first_run)
			{
				// One-time sizing of the drawing canvas and maze to the screen.
				drawing_frame = real_pic.clone();
				resize(drawing_frame, drawing_frame, Size(GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN) - 50));
				resize(maze, maze, Size(GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN) - 50));
				first_run = true;
			}
			flip(real_pic, real_pic, 1);

			// Threshold the frame in HSV to isolate the marker color.
			cvtColor(frame, frame, COLOR_BGR2HSV);
			
			inRange(frame, low_boundry, high_boundry, frame);
			flip(frame, frame, 1);

			contours.clear();
			resize(frame, frame, Size(GetSystemMetrics(SM_CXSCREEN), GetSystemMetrics(SM_CYSCREEN)));
			findContours(frame, contours, hierarchy, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
			is_size_checked = false;
			// Keep contours at least 70% the size of the largest seen so far;
			// max_contours_amount persists across frames.
			if (contours.size() != 0)
			{
				for (vector<vector<Point>>::iterator it = contours.begin(); it != contours.end(); it++)
				{
					if (it->size() > max_contours_amount * 0.7)
					{
						main_points.push_back(*it);
						max_contours_amount = it->size();
						is_size_checked = true;
					}
				}
			}
			if (is_size_checked)
			{
				moved = false;
				drawing_point = stabilized_point(main_points[0]);
				// Two markers visible: track the rightmost one and fire.
				if (main_points.size() == 2)
				{
					if (stabilized_point(main_points[0]).x < stabilized_point(main_points[1]).x)
					{
						drawing_point = stabilized_point(main_points[1]);
						
					}
					shoot = true;
				}
				// Push the point slightly away from the screen center (10% of
				// its offset) to extend the reachable range.
				drawing_point.x += (drawing_point.x - drawing_frame.size().width / 2) / 10;
				drawing_point.y += (drawing_point.y - drawing_frame.size().height / 2) / 10;
				// Clamp the point into the maze bounds (decrement/increment loops).
				while (drawing_point.x > maze.size().width)
				{
					drawing_point.x--;
				}
				while (drawing_point.x < 0)
				{
					drawing_point.x++;

				}
				while (drawing_point.y > maze.size().height)
				{
					drawing_point.y--;
				}
				while (drawing_point.y < 0)
				{
					drawing_point.y++;
				}

				// Step the cursor toward the marker in 1/15 increments, stopping
				// at maze walls (pixels whose blue channel equals WHITE).
				distance[0] = drawing_point.x - cursor.x;
				distance[1] = drawing_point.y - cursor.y;
				// NOTE(review): the && means the loop exits as soon as EITHER
				// axis reaches 0, abandoning the other axis mid-move — confirm
				// whether || was intended.
				while (distance[0] != 0 && distance[1] != 0)
				{
					if (maze.at<Vec3b>(Point(cursor.x + distance[0] / 15, cursor.y))[0] != WHITE)
					{
						cursor.x += distance[0] / 15;
						distance[0] /= 15;
						moved = true;
					}
					if (maze.at<Vec3b>(Point(cursor.x, cursor.y + distance[1] / 15))[0] != WHITE)
					{
						cursor.y += distance[1] / 15;
						distance[1] /= 15;
						moved = true;
					}				
					if (!moved)
					{
						// Both axes blocked by walls: report and stop moving.
						putText(drawing_frame, "Struck a wall!", Point(0, 40), FONT_HERSHEY_COMPLEX_SMALL, 1, Scalar(WHITE, WHITE, BLACK, 1), 1, CV_AA);
						distance[0] = 0;
						distance[1] = 0;
					}
					
				}
				SetCursorPos(drawing_point.x, drawing_point.y); // gaming stuff!
				circle(drawing_frame, cursor, 13, Scalar(WHITE, BLACK, WHITE), 2);
				circle(drawing_frame, drawing_point, 13, Scalar(WHITE, BLACK, WHITE), -1);
				//circle(drawing_frame, stabilized_point(pen1), 13, Scalar(WHITE, WHITE, BLACK), -1);
			}
			else
			{
				// No usable contour this frame: show a warning and the last cursor.
				putText(drawing_frame, "Lost drawing object!", Point(0, 20), FONT_HERSHEY_COMPLEX_SMALL, 1, Scalar(WHITE, WHITE, BLACK, 1), 1, CV_AA);
				circle(drawing_frame, cursor, 13, Scalar(WHITE, WHITE, BLACK), 3);
			}
			if (shoot)
			{
				// Two markers were detected: simulate a left click at the target.
				LeftClick(drawing_point.x, drawing_point.y);
			}
			key = waitKey(10);

			// Composite the maze over the canvas, invert for display, then
			// clear all buffers for the next iteration.
			drawing_frame = maze + drawing_frame;
			bitwise_not(drawing_frame, drawing_frame);
			//imshow("drawing_frame", drawing_frame);
			//imshow("frame", frame);

			frame = BLACK;
			drawing_frame = BLACK;
			real_pic = BLACK;

		}
	}
Example #7
0
// Locates robots in MatImage by colour: classifies every pixel against the
// three home-robot colours plus the secondary marker colour, groups
// same-coloured pixels with a two-pass connected-component labelling, and
// pairs each surviving blob with the nearest home-team marker blob to print
// and draw each robot's id, position and heading.
//
// NOTE(review): row, col, label[][], color[][], numcolor, eq_set, luas_area,
// locatex, locatey, label2color, it, dx/dy/dst, nearestTeamPoint,
// nearestSecondPoint, nearestDistance, isSecondary, dsx/dsy, angle,
// locx/locy, text and the *ColorPoint vectors are not declared locally, so
// they are presumably class members -- confirm against the class declaration.
void CImageProc::findHomeRobot( void ){
	// Zero column 0 of the label/colour maps so the (col-1) neighbour
	// lookups in the labelling pass never read stale data.
	for (row=1; row < IMAGE_HEIGHT; row++){
		label[0][row] = 0;
		color[0][row] = 0;
	}

	// Likewise zero row 0 for the (row-1) neighbour lookups.
	for (col=0; col < IMAGE_WIDTH; col++){
		label[col][0] = 0;
		color[col][0] = 0;
	}

	// Reset per-frame labelling state. eq_set[0] == 0 is the "no label" entry.
	numcolor = 0;
	eq_set.clear();
	eq_set.push_back(0);
	luas_area.clear();
	locatex.clear();
	locatey.clear();
	label2color.clear();

	// Pass 1: classify each pixel into colour class 1..4 (or 0 = background)
	// and assign provisional connected-component labels using the already
	// visited top and left neighbours (4-connectivity).
	for (row=1; row < IMAGE_HEIGHT; row++){
		for (col=1; col < IMAGE_WIDTH; col++){
			if		 (colorDistance(MatImage.at<cv::Vec3b>(row, col), homeFirstColor) < thresholdval) {
				color[col][row] = 1;
			}else /**/if (colorDistance(MatImage.at<cv::Vec3b>(row, col), homeSecondColor) < thresholdval) {
				color[col][row] = 2;
			} else /**/if (colorDistance(MatImage.at<cv::Vec3b>(row, col), homeThirdColor) < thresholdval) {
				color[col][row] = 3;
			} else /**/if (colorDistance(MatImage.at<cv::Vec3b>(row, col), secondaryColor) < thresholdval) {
				color[col][row] = 4;
			} else {
				label[col][row] = 0;
				color[col][row] = 0;
			}
			
			if (color[col][row] != 0){
				// Same colour above only: continue the component from above.
				if		 (color[col][row-1] == color[col][row] && color[col-1][row] != color[col][row]){
					label[col][row] = label[col][row-1];
				// Same colour to the left only: continue from the left.
				} else if(color[col-1][row] == color[col][row] && color[col][row-1] != color[col][row]){
					label[col][row] = label[col-1][row];
				// Both neighbours match: take the top label and record that
				// the left label is equivalent to it.
				// NOTE(review): this stores only one level of indirection, and
				// the resolution pass below also dereferences eq_set exactly
				// once -- long equivalence chains may remain split (a classic
				// two-pass CCL pitfall). TODO confirm whether blob sizes here
				// make that harmless.
				} else if(color[col-1][row]   == color[col][row] && color[col][row-1]   == color[col][row]){
					label[col][row] = label[col][row-1];
					if (label[col-1][row] != label[col][row]){
						eq_set[label[col-1][row]] = eq_set[label[col][row]];
					}
				// No matching neighbour: start a new component.
				} else {
					numcolor++;
					label[col][row] = numcolor;
					eq_set.push_back(numcolor);
				}
			} /**/
		}
	} 

	// Pass 2: flatten labels through eq_set (single level -- see NOTE above)
	// and accumulate per-label pixel count and coordinate sums for centroids.
	for (row=0; row < IMAGE_HEIGHT; row++){
		for (col=0; col < IMAGE_WIDTH; col++){
			if(eq_set[label[col][row]] != label[col][row])
				label[col][row] = eq_set[label[col][row]];

			if(label[col][row] != 0){
				label2color[label[col][row]] = color[col][row];
				luas_area[label[col][row]]++;
				locatex[label[col][row]] += col;
				locatey[label[col][row]] += row;
			}
		}
	} 

	// Keep only blobs of at least 100 pixels; compute each centroid and
	// bucket it by its colour class.
	for(it = luas_area.begin(); it != luas_area.end() ; it++){
		if((*it).second < 100)
			continue;

		cv::Point2i thisPoint;
		thisPoint.x = locatex[(*it).first]/(*it).second;
		thisPoint.y = locatey[(*it).first]/(*it).second;

		if(label2color[(*it).first] == 1)
			firstColorPoint.push_back(thisPoint);
		else if(label2color[(*it).first] == 2)
			secondColorPoint.push_back(thisPoint);
		else if(label2color[(*it).first] == 3)
			thirdColorPoint.push_back(thisPoint);
		else 
			secondaryColorPoint.push_back(thisPoint);
	}

	// Robots carrying colour 1: pair with the nearest home-team marker
	// (must be within 30 px, else not a robot); the presence of a nearby
	// secondary marker distinguishes robot id 4 from id 1.
	for(int i=0; i<firstColorPoint.size(); i++){
		dx = 0;
		dy = 0;
		dst = 0;
		nearestTeamPoint = -1;
		nearestDistance = 9999;
		isSecondary = false;
		for(int j = 0; j<homeTeamColorPoint.size(); j++){
			dx = abs(homeTeamColorPoint[j].x-firstColorPoint[i].x);
			dy = abs(homeTeamColorPoint[j].y-firstColorPoint[i].y);
			dst = (short)sqrt((double)dx*dx+dy*dy);
			if( dst < nearestDistance){
				nearestTeamPoint = j;
				nearestDistance = dst;
			}
		}
		if(nearestDistance>30) continue;

		// NOTE(review): nearestSecondPoint is recorded but never used; only
		// the distance decides the id.
		nearestSecondPoint = -1;
		nearestDistance = 9999;
		for(int j=0; j<secondaryColorPoint.size(); j++){
			dx = abs(secondaryColorPoint[j].x-firstColorPoint[i].x);
			dy = abs(secondaryColorPoint[j].y-firstColorPoint[i].y);
			dst = (short)sqrt((double)dx*dx+dy*dy);
			if( dst < nearestDistance){
				nearestSecondPoint = j;
				nearestDistance = dst;
			}
		}

		int x = 0;
		if(nearestDistance<=30){
			isSecondary = true;
			x = 4;
		} else
			x = 1;

		// Heading from the marker pair: atan2 with image-y flipped, plus a
		// 45-degree offset -- presumably because the two markers sit on a
		// diagonal of the robot; confirm against the marker layout.
		dsy = (homeTeamColorPoint[nearestTeamPoint].y - firstColorPoint[i].y);
		dsx = (firstColorPoint[i].x - homeTeamColorPoint[nearestTeamPoint].x);
		angle = (int)(atan2(dsy,dsx)*180/M_PI)+45;
		// Robot centre = midpoint of the two markers.
		locx = (firstColorPoint[i].x + homeTeamColorPoint[nearestTeamPoint].x)/2;
		locy = (firstColorPoint[i].y + homeTeamColorPoint[nearestTeamPoint].y)/2;

		cout << "Team " << x << " location : "
			 << locx << ","
			 << locy << ","
			 << (angle>=0?angle:(angle+360)) << endl;

		//cout << x << " : " << dsy << " " << dsx << " : " << a << endl;
		//cout << "\t" << atan2(-45.0,-45.0)*180/M_PI+360 << endl;
		//cout << "\t" << homeTeamColorPoint[nearestTeamPoint].x << ","
		//			 << homeTeamColorPoint[nearestTeamPoint].y << " vs "
		//			 << secondColorPoint[i].x << ","
		//			 << secondColorPoint[i].y << endl;

		rectangle(MatImage, 
				  cv::Point(locx-5, locy-5), 
				  cv::Point(locx+5, locy+5), 
				  cv::Scalar(255,255,0,0));
		sprintf_s( text, "%d", x);
		putText(MatImage, text , cv::Point(locx, locy), cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cv::Scalar(255,255,255));
	}

	// Robots carrying colour 2: same pairing logic; id 5 with a secondary
	// marker nearby, id 2 without.
	for(int i=0; i<secondColorPoint.size(); i++){
		dx = 0;
		dy = 0;
		dst = 0;
		nearestTeamPoint = -1;
		nearestDistance = 9999;
		isSecondary = false;
		for(int j = 0; j<homeTeamColorPoint.size(); j++){
			dx = abs(homeTeamColorPoint[j].x-secondColorPoint[i].x);
			dy = abs(homeTeamColorPoint[j].y-secondColorPoint[i].y);
			dst = (short)sqrt((double)dx*dx+dy*dy);
			if( dst < nearestDistance){
				nearestTeamPoint = j;
				nearestDistance = dst;
			}
		}
		if(nearestDistance>30) continue;

		nearestSecondPoint = -1;
		nearestDistance = 9999;
		for(int j=0; j<secondaryColorPoint.size(); j++){
			dx = abs(secondaryColorPoint[j].x-secondColorPoint[i].x);
			dy = abs(secondaryColorPoint[j].y-secondColorPoint[i].y);
			dst = (short)sqrt((double)dx*dx+dy*dy);
			if( dst < nearestDistance){
				nearestSecondPoint = j;
				nearestDistance = dst;
			}
		}

		int x = 0;
		if(nearestDistance<=30){
			isSecondary = true;
			x = 5;
		} else
			x = 2;

		dsy = (homeTeamColorPoint[nearestTeamPoint].y - secondColorPoint[i].y);
		dsx = (secondColorPoint[i].x - homeTeamColorPoint[nearestTeamPoint].x);
		angle = (int)(atan2(dsy,dsx)*180/M_PI)+45;
		locx = (secondColorPoint[i].x + homeTeamColorPoint[nearestTeamPoint].x)/2;
		locy = (secondColorPoint[i].y + homeTeamColorPoint[nearestTeamPoint].y)/2;

		cout << "Team " << x << " location : "
			 << locx << ","
			 << locy << ","
			 << (angle>=0?angle:(angle+360)) << endl;

		//cout << x << " : " << dsy << " " << dsx << " : " << a << endl;
		//cout << "\t" << atan2(-45.0,-45.0)*180/M_PI+360 << endl;
		//cout << "\t" << homeTeamColorPoint[nearestTeamPoint].x << ","
		//			 << homeTeamColorPoint[nearestTeamPoint].y << " vs "
		//			 << secondColorPoint[i].x << ","
		//			 << secondColorPoint[i].y << endl;

		rectangle(MatImage, 
				  cv::Point(locx-5, locy-5), 
				  cv::Point(locx+5, locy+5), 
				  cv::Scalar(255,255,0,0));
		sprintf_s( text, "%d", x);
		putText(MatImage, text , cv::Point(locx, locy), cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cv::Scalar(255,255,255));
	}

	// Robots carrying colour 3: id 6 with a secondary marker nearby, id 3
	// without. (This loop declares its own nearestPoint local instead of
	// reusing the member nearestSecondPoint.)
	for(int i=0; i<thirdColorPoint.size(); i++){
		dx = 0;
		dy = 0;
		dst = 0;
		nearestTeamPoint = -1;
		nearestDistance = 9999;
		isSecondary = false;
		for(int j = 0; j<homeTeamColorPoint.size(); j++){
			dx = abs(homeTeamColorPoint[j].x-thirdColorPoint[i].x);
			dy = abs(homeTeamColorPoint[j].y-thirdColorPoint[i].y);
			dst = (short)sqrt((double)dx*dx+dy*dy);
			if( dst < nearestDistance){
				nearestTeamPoint = j;
				nearestDistance = dst;
			}
		}
		if(nearestDistance>30) continue;

		short nearestPoint = -1;
		nearestDistance = 9999;
		for(int j=0; j<secondaryColorPoint.size(); j++){
			dx = abs(secondaryColorPoint[j].x-thirdColorPoint[i].x);
			dy = abs(secondaryColorPoint[j].y-thirdColorPoint[i].y);
			dst = (short)sqrt((double)dx*dx+dy*dy);
			if( dst < nearestDistance){
				nearestPoint = j;
				nearestDistance = dst;
			}
		}

		int x = 0;
		if(nearestDistance<=30){
			isSecondary = true;
			x = 6;
		} else
			x = 3;

		dsy = (homeTeamColorPoint[nearestTeamPoint].y - thirdColorPoint[i].y);
		dsx = (thirdColorPoint[i].x - homeTeamColorPoint[nearestTeamPoint].x);
		angle = (int)(atan2(dsy,dsx)*180/M_PI)+45;
		locx = (thirdColorPoint[i].x + homeTeamColorPoint[nearestTeamPoint].x)/2;
		locy = (thirdColorPoint[i].y + homeTeamColorPoint[nearestTeamPoint].y)/2;

		cout << "Team " << x << " location : "
			 << locx << ","
			 << locy << ","
			 << (angle>=0?angle:(angle+360)) << endl;

		//cout << x << " : " << dsy << " " << dsx << " : " << a << endl;
		//cout << "\t" << atan2(-45.0,-45.0)*180/M_PI+360 << endl;
		//cout << "\t" << homeTeamColorPoint[nearestTeamPoint].x << ","
		//			 << homeTeamColorPoint[nearestTeamPoint].y << " vs "
		//			 << secondColorPoint[i].x << ","
		//			 << secondColorPoint[i].y << endl;

		rectangle(MatImage, 
				  cv::Point(locx-5, locy-5), 
				  cv::Point(locx+5, locy+5), 
				  cv::Scalar(255,255,0,0));
		sprintf_s( text, "%d", x);
		putText(MatImage, text , cv::Point(locx, locy), cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cv::Scalar(255,255,255));
	}
}
void VideoGeneration::on_generateVideoPushButton_clicked()
{
	//string camPath = "C:\\Users\\dehandecroos\\Desktop\\Videos\\PRG28.avi";
	QString profileId = ui.profileName_lineEdit->text();


	
	string cameraIds[] = { 
		"camera_node_6_log", 
		"camera_node_1_log", 
		"camera_node_28_log",
		"camera_node_23_log"
	};
	int cameraIdsSize = sizeof(cameraIds) / sizeof(*cameraIds);
	string finalJoinQuery = "";
	int i = 1;
	for (string cameraId : cameraIds)
	{
		finalJoinQuery += "select * from " + cameraId + " where profile_id='" + profileId.toStdString() + "'";
		if (i++ != cameraIdsSize) {
			finalJoinQuery += "union ";
		}
	}
	finalJoinQuery += "order by TimeStamp";
	
	struct CameraTimeStamp{
		string cameraId;
		double timestamp;
	};
	
	
	stmt->execute("USE camera");
	ResultSet *timeStampsForProfile = stmt->executeQuery(finalJoinQuery);
	vector<CameraTimeStamp> timeStamps;
	
	while (timeStampsForProfile->next())
	{
		CameraTimeStamp timeStamp;
		timeStamp.cameraId = timeStampsForProfile->getString("Video_ID");
		timeStamp.timestamp = timeStampsForProfile->getDouble("TimeStamp");
		timeStamps.push_back(timeStamp);
	}

	
	vector<Mat> video;
	for (CameraTimeStamp ts : timeStamps)
	{
		string camPath = "C:\\AdaptiveCameraNetworkPack\\Videos\\";
		string camId = ts.cameraId;
		camPath += "PRG" + camId + ".avi";
		VideoCapture cam;
		cam.open(camPath);
		int frameRate = cam.get(CV_CAP_PROP_FPS);

		int minutes = (int)ts.timestamp;
		int seconds = (int)((ts.timestamp-(double)minutes)*100.0);
		int milis = ((ts.timestamp - (double)minutes)*100.0-seconds)*1000;
		
		int milliseconds = (minutes * 60 + seconds) * 1000 + milis;
		qDebug() << "Extracted Frames for time " + QString::number(ts.timestamp) + ", in camera " + QString::fromStdString(camId);
		cam.set(CV_CAP_PROP_POS_MSEC, milliseconds);
		
		
		for (int frameCount = 0; frameCount < frameRate; frameCount++)
		{
			Mat frame;
			cam >> frame;
			if (frame.empty())
			{
				break;
			}
			int fontFace = FONT_HERSHEY_SIMPLEX;
			double fontScale = 1;
			int thickness = 3;
			cv::Point textOrg1(10, 50);
			putText(frame, "CAM:" + ts.cameraId, textOrg1, fontFace, fontScale, Scalar::all(0),2);
			cv::Point textOrg2(500, 50);
			video.push_back(frame);
		}
		
		

		//VideoCapture

	}

	if (video.size() == 0){
		QImage finalImage(ui.lblOutput->width(), ui.lblOutput->width(), QImage::Format_RGB888);
		QPainter qp(&finalImage);
		qp.setBrush(Qt::black);
		qp.setPen(Qt::red);
		qp.setFont(QFont("Times", 12, QFont::Bold));
		qp.drawText(finalImage.rect(), Qt::AlignCenter, "NO VIDEO FOR "+ profileId);
		ui.lblOutput->setPixmap(QPixmap::fromImage(finalImage));
	}
	else
	{
		for (Mat frameZ : video)
		{
			Mat frameForQimage;
			cvtColor(frameZ, frameForQimage, CV_BGR2RGB);
			QImage outImage((uchar*)frameForQimage.data, frameForQimage.cols, frameForQimage.rows, frameZ.step, QImage::Format_RGB888);
			ui.lblOutput->setPixmap(QPixmap::fromImage(outImage));
			imshow("Output", frameZ);
			cvWaitKey(1);

		}
	}
}
Example #9
0
// Finds the single tracked object in `threshold`, draws it on `canvas`, and
// steps the button/expression state machine keyed on the object's position.
// Returns the event emitted this frame: "1".."6" for a button hit, "X<n>"
// for a new expression level, or "" when nothing fired.
//
// NOTE(review): trackState, debounceCounter, debouceFrames, expressionDiv,
// lastExLevel and the *_HORI_* / *_VER_* layout constants are defined
// outside this block (presumably class members / config) -- confirm.
string OCV::trackAndEval(Mat &threshold, Mat &canvas){
  //~ Mat temp;
  //~ threshold.copyTo(temp);
  //these two vectors needed for output of findContours
  vector< vector<Point> > contours;
  vector<Vec4i> hierarchy;
  //find contours of filtered image using openCV findContours function
  findContours(threshold, contours, hierarchy, CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
  //use moments method to find our filtered object
  string retValue = "";
  double area;
  int numObjects = hierarchy.size();
  // Tick the debounce timer down once per frame, whatever the state.
  if (debounceCounter) debounceCounter--;
  if (numObjects > 0) {
    //if number of objects greater than MAX_NUM_OBJECTS we have a noisy filter
    if (numObjects==1){
      Moments moment = moments((Mat)contours[0]);
      area = moment.m00;
      // Blob centroid: (m10/m00, m01/m00).
      Point lastPoint(
        moment.m10/area, // x
        moment.m01/area  // y
      );
      drawObject(area, lastPoint, canvas);
      // Evaluate in which position of the grid the point is
      // state machine
      // TODO CHECk bounding rectangles and contour to evaluate it. Use the layout form PNG image!
      // expression limits
      switch (trackState) {
        case TrackStt::NO_TRACK:
          cout << "TrackStt::NO_TRACK" << endl;
          // fall through: after logging, NO_TRACK is handled like UNARMED.
        case TrackStt::UNARMED:
          // Entering the expression column arms expression mode; entering
          // the vertical button band arms button mode.
          if (lastPoint.x > EXP_HORI_L) {
            trackState = TrackStt::EXPRESSION;
            //~ cout << "Next state TrackStt::EXPRESSION" << endl; 
          }
          else if (lastPoint.y > BUTT_VER_T && lastPoint.y < BUTT_VER_B) {
            trackState = TrackStt::ARMED;
            cout << "Next state TrackStt::ARMED" << endl;
          }
          else {
            trackState = TrackStt::UNARMED;
          }
          break;
        case TrackStt::ARMED:
          // From ARMED, leaving the band downward fires buttons 1-4,
          // upward fires buttons 5-6; either way we debounce afterwards.
          if (lastPoint.x > EXP_HORI_L && lastPoint.x < EXP_HORI_R) {
            trackState = TrackStt::EXPRESSION;
          }
          else if (lastPoint.y > BUTT_VER_B) {
            trackState = TrackStt::DEBOUNCING;
            debounceCounter = debouceFrames;
            if (lastPoint.x > B1_HORI_L && lastPoint.x < B1_HORI_R) {
              cout << "1" << endl; 
              retValue = "1";
            }
            else if (lastPoint.x > B2_HORI_L && lastPoint.x < B2_HORI_R) {
              cout << "2" << endl; 
              retValue = "2";
            }
            else if (lastPoint.x > B3_HORI_L && lastPoint.x < B3_HORI_R) {
              cout << "3" << endl; 
              retValue = "3";
            }
            else if (lastPoint.x > B4_HORI_L && lastPoint.x < B4_HORI_R) {
              cout << "4" << endl; 
              retValue = "4";
            }
          }
          else if (lastPoint.y < BUTT_VER_T) {
            trackState = TrackStt::DEBOUNCING;
            debounceCounter = debouceFrames;
            if (lastPoint.x > B5_HORI_L && lastPoint.x < B5_HORI_R) {
              cout << "5" << endl; 
              retValue = "5";
            }
            else if (lastPoint.x > B6_HORI_L && lastPoint.x < B6_HORI_R) {
              cout << "6" << endl; 
              retValue = "6";
            }
          }
          break;
        case TrackStt::DEBOUNCING: 
          //~ cout << "DEBOUNCING" << endl; 
          // Ignore everything until the frame counter started above expires.
          if (debounceCounter==0) 
            trackState = TrackStt::UNARMED;
          break;
        case TrackStt::EXPRESSION: 
          // Leaving the expression column disarms; otherwise map the
          // vertical position onto [0, expressionDiv-1] (top = max) and
          // emit only when the level actually changes.
          if (lastPoint.x < EXP_HORI_L) {
              trackState = TrackStt::UNARMED;
          }
          else{ 
            // TODO make a previous level comparition
            int expLevel;
            if (lastPoint.y > EXP_VER_B) 
              expLevel = 0;
            else if (lastPoint.y < EXP_VER_T)
              expLevel = expressionDiv-1;
            else {
              float ylevel = (float)(lastPoint.y-EXP_VER_T)/(float)(EXP_VER_RANGE);
              expLevel = (int)((float)(expressionDiv-1)*(1.0 - ylevel));
            }
            if (expLevel!=lastExLevel) {
              cout << "Expression level:" << expLevel << endl; 
              retValue = "X"+to_string(expLevel);
              lastExLevel = expLevel;
            }
          }
          break;
        default: 
          break;
      } 
      return retValue;
    }
    else {
      // More than one contour: treat the filter as noisy and drop tracking
      // (unless we are mid-debounce, which must be allowed to finish).
      if (trackState!=TrackStt::DEBOUNCING) trackState = TrackStt::NO_TRACK;
      //void putText(Mat& img, const string& text, Point org, int fontFace, double fontScale, Scalar color, int thickness=1, int lineType=8, bool bottomLeftOrigin=false )
      putText(canvas, "More than one object detected!", Point(2, FRAME_HEIGHT-10), 1, 0.7, Scalar(0,0,255), 1);
      cout << "More than one object detected! Next state is TrackStt::NO_TRACK" << endl; 
    }
  }
  // No (usable) object this frame: lose the track unless debouncing.
  if (trackState!=TrackStt::DEBOUNCING) trackState = TrackStt::NO_TRACK;
  return retValue;
}
Example #10
0
void TrackFace::on_drawKeypoints_clicked()
{
    // Live camera preview overlaying detected keypoints.  The mouse
    // callback (drawKeypointsCallBack) toggles featureExtractor_state
    // between SIFT_MODE and SURF_MODE; ESC in the window quits.
    int nFeatures=128;
    TrackFace::capture.open(0);

    string windowName="Draw Keypoints";
    cv::namedWindow(windowName.c_str(), cv::WINDOW_AUTOSIZE);
    cv::moveWindow(windowName.c_str(), window_x, window_y);

    featureExtractor_state=SIFT_MODE;

    while (true)
    {
        cv::Mat frame, buffer;
        if (!capture.isOpened()) break;

        capture >> buffer;
        // Half-size the capture so detection keeps up with the live feed.
        cv::resize(buffer, frame,Size(buffer.cols/2,buffer.rows/2),0,0,INTER_LINEAR);
        setMouseCallback(windowName.c_str(), drawKeypointsCallBack, NULL);

        switch(featureExtractor_state)
        {
        case SIFT_MODE:
        {
            SiftFeatureDetector detector( nFeatures );
            std::vector<KeyPoint> keypoints;

            detector.detect(frame, keypoints);
            cv::Mat img_keypoints;
            drawKeypoints(frame, keypoints, img_keypoints, Scalar::all(-1), DrawMatchesFlags::DEFAULT );
            putText(img_keypoints, "SIFT MODE, right click to SURF MODE", Point(10, 20), FONT_HERSHEY_PLAIN, 1.0, CV_RGB(255,0,0),2.0);

            imshow(windowName.c_str(), img_keypoints);

            break;
        }
        case SURF_MODE:
        {
            SurfFeatureDetector detector( nFeatures );
            std::vector<KeyPoint> keypoints;

            detector.detect(frame, keypoints);
            cv::Mat img_keypoints;
            drawKeypoints(frame, keypoints, img_keypoints, Scalar::all(-1), DrawMatchesFlags::DEFAULT );

            putText(img_keypoints, "SURF MODE, left click to SIFT MODE", Point(10, 20), FONT_HERSHEY_PLAIN, 1.0, CV_RGB(255,0,0),2.0);

            imshow(windowName.c_str(), img_keypoints);

            break;
        }
        default: break;
        }

        // BUG FIX: the original spun in `while (waitKey(100)==27)`, which
        // could release the capture repeatedly and always burned one extra
        // waitKey cycle before the outer isOpened() check noticed.  Keep the
        // per-frame waitKey(100) pump, but exit directly on ESC.
        if (cv::waitKey(100)==27)
        {
            capture.release();
            cv::destroyWindow(windowName.c_str());
            break;
        }
    }
}
Example #11
0
void TrackFace::on_grabPhoto_clicked()
{
    // Captures up to 20 training photos of the person named in grabName:
    // crops the largest detected face, saves each crop to disk and registers
    // it in the training list under a fresh label.  The mouse callback
    // (grabFaceCallBack) flips grab_state to start grabbing; ESC quits.
    /*
    transmitting data between forms is tricky:
    grabForm.show();
    string message=grabForm.getMsg();
    cout << message << endl;
    */

    string name=ui->grabName->text().toStdString();
    cout << name << endl;

    string namepath=iofunctions.addName(name, fn_namedb, fn_path);
    int frames=1;

    // Next free label id = last existing label + 1 (0 for an empty DB).
    int label=0;
    if (!labels.empty()) label=labels[labels.size()-1]+1;

    // Face tracking
    TrackFace::capture.open(0);

    string windowName="Grab Face";
    cv::namedWindow(windowName.c_str(), cv::WINDOW_AUTOSIZE);
    moveWindow(windowName.c_str(), window_x, window_y);
    grab_state=GRABBING_OFF;

    while(true)
    {
        cv::Mat frame, buffer;
        if (!capture.isOpened()) break;

        capture >> buffer;
        cv::resize(buffer, frame,Size(buffer.cols/2,buffer.rows/2),0,0,INTER_LINEAR);
        setMouseCallback(windowName.c_str(), grabFaceCallBack, NULL);

        switch(grab_state)
        {
        case GRABBING_OFF:
        {
            string text=format("Grabbing your face No. %d", frames);
            putText(frame, text, Point(frame.cols/2-250, 100), FONT_HERSHEY_PLAIN, 1.2, CV_RGB(255,0,0),2.0);
            cv::imshow(windowName.c_str(), frame);
            break;
        }
        case GRABBING_ON:
        {
            vector<cv::Rect_<int> > faces=haar_faces(frame);

            if (faces.size()>0)
            {
                // Photograph only the largest detected face.
                size_t n=findMaxFace(faces);

                Mat resizedFace=resizeFace(frame(faces[n]), im_width, im_height);

                // Files are suffixed _A.._T for frames 1..20.
                string imgPath=namepath+name+"_"+(char)(frames+'A'-1)+".jpg";
                cv::imwrite(imgPath,resizedFace);
                iofunctions.addToTrain(fn_images,"resources/"+name+"/"+name+"_"+(char)(frames+'A'-1)+".jpg", label);

                frames++;

                if (frames>20)
                {
                    grab_state=GRABBING_CLOSE;
                }
                else grab_state=GRABBING_OFF;

                drawFace(frame, faces[n], name);
            }

            cv::imshow(windowName.c_str(), frame);
            break;
        }
        case GRABBING_CLOSE :
        {
            // Done: release the camera; the isOpened() check above ends the
            // loop on the next iteration.
            capture.release();
            cv::destroyWindow(windowName.c_str());
            break;
        }
        default: break;
        }

        // BUG FIX: the original spun in `while (waitKey(5)==27)`, which could
        // re-release an already-released capture and burned an extra waitKey
        // cycle before the outer isOpened() check noticed.  Exit directly.
        if (cv::waitKey(5)==27)
        {
            capture.release();
            cv::destroyWindow(windowName.c_str());
            break;
        }
    }
}
int HumanFaceRecognizer::runFaceRecognizer(cv::Mat *frame)
{

#ifdef RESIZE_TO_SMALLER
	cv::Mat original = detector.resizeToSmaller(frame);
#else
	cv::Mat original = (*frame).clone();
#endif

#ifdef COMPARE_FACE_COLOUR
	cv::Mat outputMask;
#endif

	double face_pixel_num = 0;
	double similar_pixel_counter = 0;
	int i, j, k, p;
	int face_num = 0; // variable used when saving faces or masks
	std::vector<cv::Rect> newFacePos;
	std::ostringstream oss;

	int predictedLabel = -1;
	double confidence = 0.0;
	double confidence_threshold = 100.0;
	bool isExistedFace = false;
	bool isFace = false;

	// Apply the classifier to the frame
	detector.getFaces(*frame, newFacePos);
	cv::vector<cv::Rect>::iterator it = newFacePos.begin();

	if (newFacePos.size() == 0)
	{
		if (facesInfo.size() == 0)
			cv::waitKey(300);
	}

	removeFaceWithClosedPos();

	// If a detected face at certain position is not detected for a period of time, it is discarded
	for (p = 0; p < (int)facesInfo.size(); ++p)
	{
		if (facesInfo[p].undetected_counter > UNDETECTED_THREHOLD)
		{
#ifdef SHOW_DEBUG_MESSAGES
			std::cout << "erase: " << p << std::endl;
#endif
#ifdef SHOW_MARKERS
			oss.str("");
			oss << "ERASE";
			putText(*frame, oss.str(), facesInfo[p].centerPos, cv::FONT_HERSHEY_SIMPLEX, 0.6,
				cv::Scalar(0, 128, 255), 2);
#endif

			facesInfo.erase(facesInfo.begin() + p--);
			continue;
		}

		for (it = newFacePos.begin(); it != newFacePos.end(); ++it)
		{
			cv::Point center(it->x + it->width / 2, it->y + it->height / 2);

			if ((abs(facesInfo[p].centerPos.x - center.x) < FACE_POS_OFFSET) &&
				(abs(facesInfo[p].centerPos.y - center.y) < FACE_POS_OFFSET))
				break;
		}

		if (it == newFacePos.end())
		{
			++(facesInfo[p].undetected_counter);
#ifdef SHOW_DEBUG_MESSAGES
			std::cout << "undetected: " << facesInfo[p].undetected_counter << std::endl;
#endif
#ifdef SHOW_MARKERS
			oss.str("");
			oss << "und";
			putText(*frame, oss.str(), facesInfo[p].centerPos, cv::FONT_HERSHEY_SIMPLEX, 0.6,
				cv::Scalar(0, 128, 255), 2);
#endif
		}
	}

	// evaluate a list of possible faces
	for (i = 0, it = newFacePos.begin(); it != newFacePos.end(); ++it, ++i)
	{
		++face_num;

#ifdef RESIZE_TO_SMALLER
		cv::Mat face = original(cv::Rect((*it).x * RESIZE_SCALE, (*it).y * RESIZE_SCALE, 
			(*it).width * RESIZE_SCALE, (*it).height * RESIZE_SCALE)).clone();
#else
		cv::Mat face = original(*it).clone();
#endif
		resize(face, face, cv::Size(FACE_REC_SIZE, FACE_REC_SIZE));

		cv::Mat face_grey;
		cv::Point center(it->x + it->width*0.5, it->y + it->height*0.5);
		cv::Point top(it->x, it->y);

#ifdef COMPARE_FACE_COLOUR
		cv::vector<cv::Mat> channels;
		cv::Mat face_eq;
		cvtColor(face, face_eq, CV_BGR2YCrCb); //change the color image from BGR to YCrCb format
		split(face_eq, channels); //split the image into channels
		equalizeHist(channels[0], channels[0]); //equalize histogram on the 1st channel (Y)
		merge(channels, face_eq); //merge 3 channels including the modified 1st channel into one image
		cvtColor(face_eq, face_eq, CV_YCrCb2BGR); //change the color image from YCrCb to BGR format (to display image properly)

		detector.compareFaceColour(face_eq, outputMask);
		//detector.compareFaceColour(face, outputMask);

#ifndef FACE_MASK_COLOUR
		face_pixel_num = outputMask.rows * outputMask.cols;
#else
		face_pixel_num = outputMask.rows * outputMask.cols * NUM_OF_CHANNELS_COLOUR;
#endif
		for (j = 0; j < outputMask.rows; ++j)
		{
			for (k = 0; k < outputMask.cols; ++k)
			{
#ifndef FACE_MASK_COLOUR
				if (*(outputMask.data + (j*outputMask.cols + k)) == 255)
					similar_pixel_counter += 1;
#else
				for (int m = 0; m < NUM_OF_CHANNELS_COLOUR; ++m)
				{
					if (*(outputMask.data + j*outputMask.step + k + m) == 255)
						similar_pixel_counter += 1;
				}
#endif
			}
		}
		similar_pixel_counter /= face_pixel_num;
#endif

#ifdef COMPARE_FACE_COLOUR
		if ((similar_pixel_counter > min_percent) && (similar_pixel_counter < max_percent))  // if the percentage of similar pixeel is within certain range, it is a face
#else
		cv::cvtColor(face, face_grey, CV_BGR2GRAY);
#endif
		{
#ifdef DURATION_CHECK_FACE
			double time = 0;
			uint64_t oldCount = 0, curCount = 0;
			curCount = cv::getTickCount();
#endif
			cv::cvtColor(face_eq, face_grey, CV_BGR2GRAY);
			model->predict(face_grey, predictedLabel, confidence);
			if (confidence > confidence_threshold)
				predictedLabel = Guest;

#ifdef DURATION_CHECK_FACE
			time = (cv::getTickCount() - curCount) / cv::getTickFrequency();
			printf("\t FaceRecDur: %f\n", time);
#endif

			isExistedFace = false;
			for (p = 0; p < facesInfo.size(); ++p)
			{
				if (isExistedFace)
					break;

				if ((abs(facesInfo[p].centerPos.x - center.x) < FACE_POS_OFFSET) &&
					(abs(facesInfo[p].centerPos.y - center.y) < FACE_POS_OFFSET))
				{
					memcpy(&(facesInfo[p].centerPos), &center, sizeof(cv::Point));
					++(facesInfo[p].counter[predictedLabel]);

					if (!(facesInfo[p].isRecognized))
					{
						std::string str;
						oss.str("");
						switch (predictedLabel)
						{
						case -1:
#ifdef SHOW_MARKERS
							oss << "unrecognised";
#endif
							break;

						case Guest:
							if (facesInfo[p].counter[Guest] >= FACE_DET_THREHOLD * 2) {
#ifdef SHOW_MARKERS
								oss << PERSON_NAME[Guest] << " " << confidence;
#endif
								if (facesInfo[p].counter[Guest] == 10) {
									str = std::string(HELLO_MESSAGE) + std::string(PERSON_NAME[Guest]);
									TextToSpeech::pushBack(str);
								}
							}
#ifdef SHOW_MARKERS
							else
								oss << DETECTING << confidence;
#endif
							break;

						case Joel:
						case KaHo:
						case Yumi:
						default:
							if (facesInfo[p].counter[predictedLabel] >= FACE_DET_THREHOLD) {
#ifdef SHOW_MARKERS
								//oss << PERSON_NAME[facesInfo[p].label] << " " << confidence;
								oss << PERSON_NAME[predictedLabel] << " detected";
								//oss << PERSON_NAME[predictedLabel];
#endif
								facesInfo[p].isRecognized = true;
								facesInfo[p].label = (DETECTED_PERSON)predictedLabel;
#ifdef SHOW_DEBUG_MESSAGES
								std::cout << "detected: " << predictedLabel << '\n';
#endif
								/* Text to Speech */
								if (center.x < RIGHT_THREASHOLD)
									str = std::string(PERSON_NAME[predictedLabel]) + std::string(RIGHT_MESSAGE);
								else if (center.x > LEFT_THREASHOLD)
									str = std::string(PERSON_NAME[predictedLabel]) + std::string(LEFT_MESSAGE);
								else
									str = std::string(PERSON_NAME[predictedLabel]) + std::string(CENTER_MESSAGE);

								//str = std::string(HELLO_MESSAGE) + std::string(PERSON_NAME[predictedLabel]);
								TextToSpeech::pushBack(str);
							}
#ifdef SHOW_MARKERS
							else
							{
								//oss << DETECTING << ", maybe " << PERSON_NAME[facesInfo[p].label];
								oss << "maybe " << PERSON_NAME[predictedLabel] << "-" << confidence;
							}
#endif
							break;
						}

					}
					else
					{
						if (predictedLabel > 0 && predictedLabel < PERSON_NAME.size())
						{
							if ((float)facesInfo[p].counter[predictedLabel] / (float)facesInfo[p].counter[facesInfo[p].label] > 2.0)
							{
								facesInfo[p].label = (DETECTED_PERSON)predictedLabel;

								/* Text to Speech */
								if (center.x < RIGHT_THREASHOLD)
									TextToSpeech::pushBack(std::string(PERSON_NAME[predictedLabel]) + std::string(RIGHT_MESSAGE));
								else if (center.x > LEFT_THREASHOLD)
									TextToSpeech::pushBack(std::string(PERSON_NAME[predictedLabel]) + std::string(LEFT_MESSAGE));
								else
									TextToSpeech::pushBack(std::string(PERSON_NAME[predictedLabel]) + std::string(CENTER_MESSAGE));
								
								//TextToSpeech::pushBack(std::string(HELLO_MESSAGE) + std::string(PERSON_NAME[predictedLabel]));
							}

#ifdef SHOW_MARKERS
							oss.str("");
							oss << "D:" << PERSON_NAME[facesInfo[p].label] << ",R:" << PERSON_NAME[predictedLabel] << "-" << confidence;
							//oss << PERSON_NAME[facesInfo[p].label];
							facesInfo[p].undetected_counter = 0;
#endif
						}
					}

					isExistedFace = true;
				}
			}

			if (facesInfo.size() == 0 || !isExistedFace)
			{
				DetectionInfo para;
				memset(&para, 0, sizeof(DetectionInfo));
				para.isRecognized = false;
				memcpy(&(para.centerPos), &center, sizeof(cv::Point));
				memcpy(&(para.size), &(it->size()), sizeof(cv::Size));
				para.counter.resize(num_of_person_in_db, 0);
				para.counter[predictedLabel] = 1;
				facesInfo.push_back(para);

#ifdef SHOW_MARKERS
				oss.str("");
				oss << "maybe " << PERSON_NAME[predictedLabel] << "-" << confidence;
#endif
			}
#ifdef SHOW_DEBUG_MESSAGES
			std::cout << "facesInfo size: " << facesInfo.size() << std::endl;
#endif

#ifdef SHOW_MARKERS
			putText(*frame, oss.str(), top, cv::FONT_HERSHEY_SIMPLEX, 0.5,
				cv::Scalar(255, 0, 255, 1));
			ellipse(*frame, center, cv::Size(it->width/2, it->height/2), 0,
				0, 360, cv::Scalar(0, 0, 255), 6, 8, 0);
#endif

#ifdef SAVE_IMAGES
#ifdef SAVE_FACES
			oss.str("");
#ifdef TEST_FACE
			oss << "_Test_Face_B" << currPer << "_" << BASE_DIR << CORRECT_DIR << image_num << "_" << face_num << FACE_NAME_POSTFIX << IMAGE_EXTENSION;
#else
			oss << BASE_DIR << CORRECT_DIR << image_num << "_" << face_num << FACE_NAME_POSTFIX << IMAGE_EXTENSION;
#endif
			cv::imwrite(oss.str(), face);
#endif
#ifdef COMPARE_FACE_COLOUR
#ifdef SAVE_MASKS
			oss.str("");
#ifdef TEST_FACE
			oss << "_Test_Face_B" << currPer << "_" << BASE_DIR << CORRECT_DIR << image_num << "_" << face_num << MASK_NAME_POSTFIX << IMAGE_EXTENSION;
#else
			oss << BASE_DIR << CORRECT_DIR << image_num << "_" << face_num << MASK_NAME_POSTFIX << IMAGE_EXTENSION;
#endif
			cv::imwrite(oss.str(), outputMask);
#endif
#endif
#endif
		}
#ifdef COMPARE_FACE_COLOUR
		else // it is not a face
		{
#ifdef SHOW_MARKERS
			ellipse(*frame, center, cv::Size(it->width*0.5, it->height*0.5), 0, 0, 360, cv::Scalar(255, 0, 0), 4, 8, 0);
#endif

#ifdef SAVE_IMAGES
#ifdef SAVE_FACES
			oss.str("");
#ifdef TEST_FACE
			oss << "_Test_Face_B" << currPer << "_" << BASE_DIR << WRONG_DIR << image_num << "_" << face_num << FACE_NAME_POSTFIX << IMAGE_EXTENSION;
#else
			oss << BASE_DIR << WRONG_DIR << image_num << "_" << face_num << FACE_NAME_POSTFIX << IMAGE_EXTENSION;
#endif
			cv::imwrite(oss.str(), face);
#endif
#ifdef SAVE_MASKS
			oss.str("");
#ifdef TEST_FACE
			oss << "_Test_Face_B" << currPer << "_" << BASE_DIR << WRONG_DIR << image_num << "_" << face_num << MASK_NAME_POSTFIX << IMAGE_EXTENSION;
#else
			oss << BASE_DIR << WRONG_DIR << image_num << "_" << face_num << MASK_NAME_POSTFIX << IMAGE_EXTENSION;
#endif
			cv::imwrite(oss.str(), outputMask);
#endif
#endif
		}
#endif


#ifdef DISPLAY_FACES_AND_MASKS
		oss.str("");
		oss << "face[" << i << "]";
		cv::namedWindow(oss.str());                        // Create a window for display.
		cv::imshow(oss.str(), face);                       // Show our image inside it.

#ifdef COMPARE_FACE_COLOUR
		oss.str("");
		oss << "outputMask[" << i << "]";
		cv::namedWindow(oss.str());                        // Create a window for display.
		cv::imshow(oss.str(), outputMask);                 // Show our image inside it.
#endif
#endif

#ifdef COMPARE_FACE_COLOUR
		total_percent += similar_pixel_counter;
		//total_percent_var += pow(similar_pixel_counter - total_percent, 2);
		similar_pixel_counter = 0;
#endif
		totalConfidence += confidence;
		num_of_face_detected++;

		isFace = false;
	}
// Constructor: initialises viewer state, converts the waypoint course from
// lat/long to local UTM metres, and pre-renders the static background map
// (island centre marker, 50 m scale bar, numbered waypoint labels).
// Fix: removed the unused locals `double nord, est;` (declared, never read).
WayPointsViewer::WayPointsViewer()
{
	m_iterations = 0;
	m_timewarp   = 1;
	m_ref_identifiants_points = 1;
	
	// Pixel coordinates of the island centre on the background image.
	m_centre_ile_px.x = 245;
	m_centre_ile_px.y = 260;
	
	// Reference origin (lat, long) for the local UTM conversion.
	m_moosgeodesy.Initialise(48.303131, -4.537218);

	// Wide course (kept for reference)
	/*m_waypoints.push_back(make_point(-4.534274881126787,48.30328123684544));
	m_waypoints.push_back(make_point(-4.534354338163467,48.3028926627652));
	m_waypoints.push_back(make_point(-4.534987862732912,48.30265745068134));
	m_waypoints.push_back(make_point(-4.534912811622956,48.30172799501912));
	m_waypoints.push_back(make_point(-4.535541337401179,48.30144781440489));
	m_waypoints.push_back(make_point(-4.536623557565463,48.30171121108806));
	m_waypoints.push_back(make_point(-4.538187294636643,48.3015653100484));
	m_waypoints.push_back(make_point(-4.539082024594746,48.30173802893376));
	m_waypoints.push_back(make_point(-4.539520934204093,48.302301986646));
	m_waypoints.push_back(make_point(-4.539370088840833,48.30346048022233));
	m_waypoints.push_back(make_point(-4.538118344474873,48.30466090900167));
	m_waypoints.push_back(make_point(-4.536681443170951,48.30492727178057));
	m_waypoints.push_back(make_point(-4.535945459593399,48.30479686179294));
	m_waypoints.push_back(make_point(-4.535589168541877,48.30434275663863));
	m_waypoints.push_back(make_point(-4.535439601902204,48.3037517878406));
	m_waypoints.push_back(make_point(-4.534638383926079,48.30352082998913));
	m_waypoints.push_back(make_point(-4.534274881126787,48.30328123684544));*/

	// Course close to the island (closed loop: first point repeated last).
	m_waypoints.push_back(make_point(-4.534590163338356,48.30323621201908));
	m_waypoints.push_back(make_point(-4.53478762216008,48.30334651838246));
	m_waypoints.push_back(make_point(-4.535338202882623,48.30326872403582));
	m_waypoints.push_back(make_point(-4.535709627194199,48.30335301044348));
	m_waypoints.push_back(make_point(-4.535923558290338,48.3038853028886));
	m_waypoints.push_back(make_point(-4.535892651305976,48.30424405590262));
	m_waypoints.push_back(make_point(-4.536055327369376,48.30454539251438));
	m_waypoints.push_back(make_point(-4.536493267983829,48.30473876429819));
	m_waypoints.push_back(make_point(-4.537175041209843,48.30462322599557));
	m_waypoints.push_back(make_point(-4.537865910908123,48.30450112394015));
	m_waypoints.push_back(make_point(-4.538193579521336,48.30431361797088));
	m_waypoints.push_back(make_point(-4.539054088257123,48.30339799457749));
	m_waypoints.push_back(make_point(-4.539203448805972,48.30307094254877));
	m_waypoints.push_back(make_point(-4.539270962811838,48.30218588442434));
	m_waypoints.push_back(make_point(-4.538953658010474,48.30191794661709));
	m_waypoints.push_back(make_point(-4.538187539710014,48.30174497426677));
	m_waypoints.push_back(make_point(-4.536933709104124,48.30201349870482));
	m_waypoints.push_back(make_point(-4.535811260639849,48.30167755409671));
	m_waypoints.push_back(make_point(-4.535348754982527,48.30173218255946));
	m_waypoints.push_back(make_point(-4.535488634255342,48.30278271885528));
	m_waypoints.push_back(make_point(-4.535270044927863,48.30293684926692));
	m_waypoints.push_back(make_point(-4.534597992379511,48.30303867953135));
	m_waypoints.push_back(make_point(-4.534590163338356,48.30323621201908));
	
	// Initial position = first waypoint; converted in place to local UTM.
	m_position = make_point(-4.534590163338356,48.30323621201908);
	m_moosgeodesy.LatLong2LocalUTM(m_position.y, m_position.x, m_position.y, m_position.x);
	
	// Convert every waypoint in place from lat/long to local UTM metres.
	for(list<Point2D>::iterator it = m_waypoints.begin() ; it != m_waypoints.end() ; it ++)
		m_moosgeodesy.LatLong2LocalUTM(it->y, it->x, it->y, it->x);
	
	cvNamedWindow("Mapping", 1);
	m_map_background = imread("background_ile.png", CV_LOAD_IMAGE_COLOR);
	
	// Island centre marker.
	tracerPointeur(m_map_background, m_centre_ile_px.x, m_centre_ile_px.y, 20, 1, Scalar(208, 222, 223));
	
	// 50 m scale bar in the bottom-left corner (bar plus two end ticks).
	double a = metresEnPixels(50.);
	line(m_map_background, 
			Point(10, HAUTEUR_MAPPING - 10), 
			Point(10 + a, HAUTEUR_MAPPING - 10), 
			Scalar(255, 255, 255), 1, 8, 0);
	line(m_map_background, 
			Point(10, HAUTEUR_MAPPING - 10), 
			Point(10, HAUTEUR_MAPPING - 13), 
			Scalar(255, 255, 255), 1, 8, 0);
	line(m_map_background, 
			Point(10 + a, HAUTEUR_MAPPING - 10), 
			Point(10 + a, HAUTEUR_MAPPING - 13), 
			Scalar(255, 255, 255), 1, 8, 0);
	
	// Course to follow: one pointer + numeric label per waypoint.
	for(list<Point2D>::iterator it = m_waypoints.begin() ; it != m_waypoints.end() ; it ++)
	{
		if(it->identifiant == 1)
			continue;
		
		char texte[10];
		int x, y, delta_x, delta_y;
		x = m_centre_ile_px.x + metresEnPixels(it->x);
		y = m_centre_ile_px.y - metresEnPixels(it->y);
		sprintf(texte, "%d", it->identifiant);
		
		// Offset the label away from the island centre so it stays readable.
		if(x > m_centre_ile_px.x)
			delta_x = 4;
		
		else
			delta_x = -(10 * strlen(texte));
		
		if(y > m_centre_ile_px.y)
			delta_y = 10;
		
		else
			delta_y = -5;
		
		tracerPointeur(m_map_background, x, y, 5, 1, Scalar(243, 109, 77));
		putText(m_map_background, string(texte), 
					Point(x + delta_x, y + delta_y), 
					FONT_HERSHEY_SIMPLEX, 
					0.3, 
					Scalar(243, 109, 77));
	}
}
// Draws the on-screen numeric keypad and treats the tracked marker position
// as a dwell-to-press input: hovering over a key increments a dwell counter
// and, once TIME_COUNTER is reached, registers that key as pressed.
// (Comments translated from Turkish.)
void DrawScreen::drawWindow(cv::Point& coordinate){
	// draw the number keys and the accept/cancel buttons
	drawNumbers();
	// print the digits entered so far (colour signals accepted vs pending)
	if (num_color) {
		putText(frames, numbers, cv::Point(105, 51), 5, 3, NUMBER2_COLOR, 3);
	}
	else {
		putText(frames, numbers, cv::Point(105, 51), 5, 3, NUMBER3_COLOR, 3);
	}

	// (-1, -1) means no marker was detected this frame
	if (coordinate.x != -1 && coordinate.y != -1){
		int temp = returnPosition(coordinate.x, coordinate.y);
		// marker is still on the same key as last frame: keep dwelling
		if (temp > 0 && temp == keyTurn){
			timeCounter++;
		}
		// marker just entered a (different) key: restart the dwell timer
		// with timeCounter = 1 and remember the key in keyTurn
		else if (temp > 0){
			keyTurn = temp;
			timeCounter = 1;
		}
		// marker is not over any key: reset timeCounter and keyTurn to defaults
		else{
			keyTurn = -1;
			timeCounter = 0;
		}

		// draw a circular progress bar (disabled)
		//drawProgressBar(temp);

		// linear dwell-progress bar: white track, coloured fill that shifts
		// from red toward green as timeCounter grows
		line(frames, cv::Point(100, 65), cv::Point(370, 65), cv::Scalar(255, 255, 255), 6, 2, 0);	// progress line
		line(frames, cv::Point(100, 65), cv::Point((timeCounter/2 * 27) + 100, 65), cv::Scalar(0, ((timeCounter * 10) % 256), (255 - ((timeCounter * 10) % 256))), 6, 2, 0);	// progress line

		// dwell threshold reached: the hovered key becomes an input
		if (timeCounter >= TIME_COUNTER){
			// 1..9 are digit keys (the % 10 below is a no-op for keyTurn < 10)
			if (keyTurn < 10 && keyTurn>0){
				keyTurn = keyTurn % 10;
				printf("Deger %d\n", keyTurn);
				numbers = numbers + intToString(keyTurn);
			}
			// 10 is the cancel button: clear the entered digits
			else if (keyTurn == 10){
				printf("IPTAL\n");
				num_color = true;
				numbers = "";
			}
			// 11 is the "0" button
			else if (keyTurn == 11){
				printf("Deger 0\n");
				numbers = numbers + "0";
			}
			// 12 is the confirm button ("ONAY" = confirm)
			// NOTE(review): original comment called key 12 the delete button,
			// but the code prints ONAY and only toggles num_color — confirm intent.
			else if (keyTurn == 12){
				printf("ONAY\n");
				num_color = false;
			}

			// reset timeCounter and keyTurn to their default values
			timeCounter = 0;
			keyTurn = -1;
		}
		// marker is off the keypad: draw the crosshair at the marker position
		if (temp == -1)
		{
			drawObject(coordinate.x, coordinate.y);
		}
	}

	// no marker found: reset keyTurn and timeCounter to their default values
	else{
		keyTurn = -1;
		timeCounter = 0;
	}
}
// Main iteration: rebuilds the local obstacle map from the latest range
// readings, fits a wall line to the filtered points (three regression norms
// drawn for comparison), publishes the wall distance over MOOS, optionally
// feeds the regulator, and renders the map with range rings and annotations.
// Fixes: catch cv::Exception by const reference (was caught by value, which
// slices/copies), and use fabs() for the float distance (plain abs() can bind
// the integer overload from <cstdlib> and truncate the result).
// Always returns true. (Comments translated from French.)
bool WallFollowing::Iterate()
{
	float angle = 0.0, coefficient_affichage = 45.0/*8.*/;	// pixels per metre
	float distance = 0.0, taille_pointeur;
	m_iterations++;

	// NOTE(review): Mat::zeros(rows, cols, ...) — LARGEUR (width) is passed as
	// rows and HAUTEUR (height) as cols; confirm intended (harmless if square).
	m_map = Mat::zeros(LARGEUR_MAPPING, HAUTEUR_MAPPING, CV_8UC3);
	m_map = Scalar(255, 255, 255);
	
	std::vector<Point2f> points_obstacles;
	
	for(list<pair<float, float> >::const_iterator it = m_obstacles.begin() ; it != m_obstacles.end() ; it ++)
	{
		angle = it->first; 		// key
		distance = it->second; 	// value
		
		// Reject readings outside the usable range window.
		//if(distance < 5.)
		if(distance < 0.25 || distance > 2.)
			continue;
		
		// Polar (angle, distance) -> Cartesian, sensor at the origin.
		float x_obstacle = 0;
		float y_obstacle = 0;

		y_obstacle -= distance * cos(angle * M_PI / 180.0);
		x_obstacle += distance * sin(angle * M_PI / 180.0);
		
		// Angle filtering: keep only the sector facing the wall.
		double angle_degre = MOOSRad2Deg(MOOS_ANGLE_WRAP(MOOSDeg2Rad(angle)));
		if(angle_degre > -160. && angle_degre < -70.)
		{
			points_obstacles.push_back(Point2f(x_obstacle, y_obstacle));
			
			// Metres -> display pixels, centred on the map.
			x_obstacle *= -coefficient_affichage;
			y_obstacle *= coefficient_affichage;
			
			x_obstacle += LARGEUR_MAPPING / 2.0;
			y_obstacle += HAUTEUR_MAPPING / 2.0;
			
			// Draw a small cross for each retained obstacle point.
			taille_pointeur = 3;
			line(m_map, Point(x_obstacle, y_obstacle - taille_pointeur), Point(x_obstacle, y_obstacle + taille_pointeur), Scalar(161, 149, 104), 1, 8, 0);
			line(m_map, Point(x_obstacle - taille_pointeur, y_obstacle), Point(x_obstacle + taille_pointeur, y_obstacle), Scalar(161, 149, 104), 1, 8, 0);
		}
	}
	
	int echelle_ligne = 150;	// half-length (px) of the drawn fitted lines
	Mat m(points_obstacles);
	
	if(!points_obstacles.empty())
	{
		Vec4f resultat_regression;
		
		try
		{
			// Method 1: least-squares (L2) fit — drawn in orange.
			fitLine(m, resultat_regression, CV_DIST_L2, 0, 0.01, 0.01);
			float x0 = resultat_regression[2];	// point on the line
			float y0 = resultat_regression[3];
			float vx = resultat_regression[0];	// unit direction vector
			float vy = resultat_regression[1];
			// Draw the fitted line.
			line(m_map, 
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage) + (vx * echelle_ligne), 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage) - (vy * echelle_ligne)),
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage) - (vx * echelle_ligne), 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage) + (vy * echelle_ligne)),
					Scalar(29, 133, 217), 1, 8, 0); // Orange
					
			// Method 2: L1-L2 (Huber-like) fit — drawn in green.
			fitLine(m, resultat_regression, CV_DIST_L12, 0, 0.01, 0.01);
			x0 = resultat_regression[2];
			y0 = resultat_regression[3];
			vx = resultat_regression[0];
			vy = resultat_regression[1];
			// Draw the fitted line.
			line(m_map, 
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage) + (vx * echelle_ligne), 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage) - (vy * echelle_ligne)),
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage) - (vx * echelle_ligne), 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage) + (vy * echelle_ligne)),
					Scalar(77, 130, 27), 1, 8, 0); // Green
					
			// Method 3: least-absolute-deviation (L1) fit — drawn in red.
			// Its result is the one used for the published distance below.
			fitLine(m, resultat_regression, CV_DIST_L1, 0, 0.01, 0.01);
			x0 = resultat_regression[2];
			y0 = resultat_regression[3];
			vx = resultat_regression[0];
			vy = resultat_regression[1];
			// Draw the fitted line.
			line(m_map, 
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage) + (vx * echelle_ligne), 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage) - (vy * echelle_ligne)),
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage) - (vx * echelle_ligne), 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage) + (vy * echelle_ligne)),
					Scalar(13, 13, 188), 1, 8, 0); // Red
			// Mark the fit origin (x0, y0) with a cross.
			taille_pointeur = 6;
			line(m_map, 
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage), 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage) - taille_pointeur),
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage), 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage) + taille_pointeur),
					Scalar(9, 0, 130), 2, 8, 0);
			line(m_map, 
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage) - taille_pointeur, 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage)),
					Point((LARGEUR_MAPPING / 2.0) - (x0 * coefficient_affichage) + taille_pointeur, 
							(HAUTEUR_MAPPING / 2.0) + (y0 * coefficient_affichage)),
					Scalar(9, 0, 130), 2, 8, 0);
			
			// Wall heading and perpendicular distance from the sensor origin
			// to the fitted line (|v x p| with unit v). fabs keeps the float
			// value intact (plain abs could truncate via the int overload).
			angle = atan2(vy, vx);
			cout << "X0 : " << x0 << "\t\tY0 : " << y0 << endl;
			distance = fabs(-vy*x0 + vx*y0);
			cout << "Angle : " << angle * 180.0 / M_PI << "\t\tDist : " << distance << endl;
			m_Comms.Notify("DIST_MUR", distance);

			if(m_regulate)
				computeAndSendCommands(angle, distance);
		}
		
		// Best-effort: fitLine may throw on degenerate input; skip this frame.
		catch(const Exception& e) { }
		
		// Rotate the map 180 degrees for display orientation.
		Point2f src_center(m_map.cols/2.0F, m_map.rows/2.0F);
		Mat rot_mat = getRotationMatrix2D(src_center, 180.0, 1.0);
		warpAffine(m_map, m_map, rot_mat, m_map.size());
	}
		
	// Circular range scales, 1 m apart, with labels on the 45-degree diagonal.
	char texte[50];
	float taille_texte = 0.4;
	Scalar couleur_echelles(220, 220, 220);
	for(float j = 1.0 ; j < 30.0 ; j ++)
	{
		float rayon = coefficient_affichage * j;
		circle(m_map, Point(LARGEUR_MAPPING / 2, HAUTEUR_MAPPING / 2), rayon, couleur_echelles, 1);
		sprintf(texte, "%dm", (int)j);
		rayon *= cos(M_PI / 4.0);
		putText(m_map, string(texte), Point((LARGEUR_MAPPING / 2) + rayon, (HAUTEUR_MAPPING / 2) - rayon), FONT_HERSHEY_SIMPLEX, taille_texte, couleur_echelles);
	}
	
	// Cross marking the sensor origin.
	taille_pointeur = 20;
	line(m_map, Point(LARGEUR_MAPPING / 2, HAUTEUR_MAPPING / 2 - taille_pointeur * 1.5), Point(LARGEUR_MAPPING / 2, HAUTEUR_MAPPING / 2 + taille_pointeur), Scalar(150, 150, 150), 1, 8, 0);
	line(m_map, Point(LARGEUR_MAPPING / 2 - taille_pointeur, HAUTEUR_MAPPING / 2), Point(LARGEUR_MAPPING / 2 + taille_pointeur, HAUTEUR_MAPPING / 2), Scalar(150, 150, 150), 1, 8, 0);
	
	// Boundary lines of the filtered data sector (-70 / -160 degrees).
	line(m_map, Point(0, (HAUTEUR_MAPPING / 2) + HAUTEUR_MAPPING * sin(MOOSDeg2Rad(-70.))), Point(LARGEUR_MAPPING / 2, HAUTEUR_MAPPING / 2), Scalar(150, 150, 150), 1, 8, 0);
	line(m_map, Point(0, (HAUTEUR_MAPPING / 2) - HAUTEUR_MAPPING * sin(MOOSDeg2Rad(-160.))), Point(LARGEUR_MAPPING / 2, HAUTEUR_MAPPING / 2), Scalar(150, 150, 150), 1, 8, 0);
	
	// Status line with the last computed distance and angle.
	if(!points_obstacles.empty())
	{
		sprintf(texte, "Dist = %.2fm   Angle = %.2f", distance, angle);
		putText(m_map, string(texte), Point(10, HAUTEUR_MAPPING - 10), FONT_HERSHEY_SIMPLEX, taille_texte, Scalar(50, 50, 50));
	}
	
	imshow("Mapping", m_map);
	waitKey(1);
	
	return(true);
}
Example #16
0
// Corrects a webcam frame so the subject's face stays upright and centred:
// skin-colour segmentation (YCbCr) drives an image-moment estimate of the
// lean angle, the frame is rotated by a smoothed angle, re-cropped around the
// detected face, and optionally tiled with debug views in developer mode.
// Results are written to outputFrame; several members (face, head, bw, img,
// smoothAngle, smoothSize, ...) carry state between calls.
void VideoCorrect::correctImage(Mat& inputFrame, Mat& outputFrame, bool developerMode){
	
	resize(inputFrame, inputFrame, CAMERA_RESOLUTION);
	inputFrame.copyTo(img);

	//Convert to YCbCr color space
	cvtColor(img, ycbcr, CV_BGR2YCrCb);

	//Skin color thresholding
	inRange(ycbcr, Scalar(0, 150 - Cr, 100 - Cb), Scalar(255, 150 + Cr, 100 + Cb), bw);

	// First-frame setup: find the face, build the elliptical background mask,
	// and capture the "best pose" reference crop.
	if(IS_INITIAL_FRAME){
		face = detectFaces(img);
		if(face.x != 0){
			lastFace = face;
		}
		else{
			// No face yet: pass the frame through unmodified.
			outputFrame = img;
			return;
		}
		prevSize = Size(face.width/2, face.height/2);
		head = Mat::zeros(bw.rows, bw.cols, bw.type());
		ellipse(head, Point(face.x + face.width/2, face.y + face.height/2), prevSize, 0, 0, 360, Scalar(255,255,255,0), -1, 8, 0);
		// Only crop the reference image if the face rect lies fully inside the frame.
		if(face.x > 0 && face.y > 0 && face.width > 0 && face.height > 0 
			&& (face.x + face.width) < img.cols && (face.y + face.height) < img.rows){
			img(face).copyTo(bestImg);
		}
		putText(img, "Give your best pose!", Point(face.x, face.y), CV_FONT_HERSHEY_SIMPLEX, 0.4, Scalar(255,255,255,0), 1, CV_AA);
	}

	// Countdown of initial frames — presumably IS_INITIAL_FRAME tests this
	// counter; TODO confirm against the macro definition.
	firstFrameCounter--;

	if(face.x == 0) //missing face prevention
		face = lastFace;

	//Mask the background out
	bw &= head;

	//Compute more accurate image moments after background removal
	// (second-order central moments give the skin blob's orientation).
	m = moments(bw, true);
	angle = (atan((2*m.nu11)/(m.nu20-m.nu02))/2)*180/PI;
	center = Point(m.m10/m.m00,m.m01/m.m00);

	//Smooth rotation (running average over SMOOTHER_SIZE samples)
	bufferCounter++;
	rotationBuffer[ bufferCounter % SMOOTHER_SIZE ] = angle;
	smoothAngle += (angle - rotationBuffer[(bufferCounter + 1) % SMOOTHER_SIZE]) / SMOOTHER_SIZE;

	//Expand borders so rotation/cropping never samples outside the image
	copyMakeBorder( img, img, BORDER_EXPAND, BORDER_EXPAND, BORDER_EXPAND, BORDER_EXPAND, 
					BORDER_REPLICATE, Scalar(255,255,255,0));

	if(!IS_INITIAL_FRAME){
		//Rotate the image to correct the leaning angle
		rotateImage(img, smoothAngle);
	
		//After rotation detect faces
		face = detectFaces(img);
		if(face.x != 0)
			lastFace = face;

		//Create background mask around the face
		head = Mat::zeros(bw.rows, bw.cols, bw.type());
		ellipse(head, Point(face.x - BORDER_EXPAND + face.width/2, face.y -BORDER_EXPAND + face.height/2),
					  prevSize, 0, 0, 360, Scalar(255,255,255,0), -1, 8, 0);

		//Draw a rectangle around the face
		//rectangle(img, face, Scalar(255,255,255,0), 1, 8, 0);

		//Overlay the ideal pose
		if(replaceFace && center.x > 0 && center.y > 0){
			// NOTE(review): face.width is used for the y offset (not
			// face.height) — assumes a square face box; confirm intended.
			center = Point(face.x + face.width/2, face.y + face.width/2);
			overlayImage(img, bestImg, center, smoothSize);
		}

	} else{
		face.x += BORDER_EXPAND; //position alignment after border expansion (not necessary if we detect the face after expansion)
		face.y += BORDER_EXPAND;
	}
	
	//Smooth ideal image size (running average)
	sizeBuffer[ bufferCounter % SMOOTHER_SIZE ] = face.width;
	smoothSize += (face.width - sizeBuffer[(bufferCounter + 1) % SMOOTHER_SIZE]) / SMOOTHER_SIZE;

	//Get ROI centred on the face
	// NOTE(review): same face.width-for-y pattern as above — confirm intended.
	center = Point(face.x + face.width/2, face.y + face.width/2);
	roi = getROI(img, center);
	// Crop only when the ROI lies fully inside the (expanded) image.
	if(roi.x > 0 && roi.y > 0 && roi.width > 0 && roi.height > 0 
		&& (roi.x + roi.width) < img.cols && (roi.y + roi.height) < img.rows){
		img = img(roi);
	}

	//Resize the final image
	resize(img, img, CAMERA_RESOLUTION);

	if(developerMode){

		// Developer mode: tile corrected image | raw input | skin mask,
		// with the best-pose crop pasted over the right edge.
		Mat developerScreen(img.rows, 
							img.cols + 
							inputFrame.cols +
							bw.cols, CV_8UC3);

		Mat left(developerScreen, Rect(0, 0, img.size().width, img.size().height));
		img.copyTo(left);

		Mat center(developerScreen, Rect(img.cols, 0, inputFrame.cols, inputFrame.rows));
		inputFrame.copyTo(center);

		cvtColor(bw, bw, CV_GRAY2BGR);
		Mat right(developerScreen, Rect(img.size().width + inputFrame.size().width, 0, bw.size().width, bw.size().height));
		bw.copyTo(right);

		Mat rightmost(developerScreen, Rect(img.size().width + inputFrame.size().width + bw.size().width - bestImg.size().width, 0,
											bestImg.size().width, bestImg.size().height));
		bestImg.copyTo(rightmost);

		outputFrame = developerScreen;
	}
	else{
		outputFrame = img;
	}
}
Example #17
0
void Calibration::draw(){
	switch(calibrationStage){
		case ORANGE:{
			putText(inputImage, "Laranja", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case BLUE:{
			putText(inputImage, "Azul", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case YELLOW:{
			putText(inputImage, "Amarelo", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case PURPLE:{
			putText(inputImage, "Roxo", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case PINK:{
			putText(inputImage, "Rosa", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case BROWN:{
			putText(inputImage, "Marrom", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case RED:{
			putText(inputImage, "Vermelho", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case GREEN:{
			putText(inputImage, "Verde", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case ROTATION:{
			putText(inputImage, "Rotacao", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		case CUT:{
			putText(inputImage, "Corte", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
		default:{
			putText(inputImage, "Unknown", Point(12, 15), 0, 0.5, CV_RGB(255, 255, 0), 1, 8, false);
		}break;
	}
}
Example #18
0
// Interactive vision-pipeline loop: grabs frames from `capture`, applies
// blur/brightness/contrast from the controls window, thresholds and
// erodes/dilates, detects blobs, and reports the candidate whose aspect
// ratio matches the configured tape object (width/height/rotation/distance
// overlays). Runs until 'q'/'Q'/ESC; settings are persisted on exit.
// Returns 0.
// Fixes: captureTime no longer truncated to whole seconds before the *1000;
// ptnum initialised (was read uninitialised — UB); controlsWindow released
// on the early quit path (was leaked).
int process(VideoCapture& capture) {
    long captureTime;
    cout << "Press q or escape to quit!" << endl;

    CvFont infoFont;
    cvInitFont(&infoFont, CV_FONT_HERSHEY_SIMPLEX, 1, 1);

    namedWindow(VIDEO_WINDOW_NAME, CV_WINDOW_AUTOSIZE);
    namedWindow(ERODE_PREVIEW_WIN_NAME, CV_WINDOW_NORMAL);
    resizeWindow(ERODE_PREVIEW_WIN_NAME, 320, 240);
    ControlsWindow* controlsWindow = new ControlsWindow();

    if(fileExists(preferenceFileName)) {
        loadSettings(controlsWindow, (char*)preferenceFileName);
    }

    Mat frame;
    while (true) {
        capture >> frame;
        // Frame timestamp in ms (fix: convert before truncating, the old
        // (int)(ticks/freq)*1000 dropped all sub-second precision).
        captureTime = (long)(getTickCount() / getTickFrequency() * 1000);

        if (frame.empty())
            break;

        // Downscale to a fixed width, preserving the source aspect ratio.
        int target_width = 320;
        int height = (target_width/capture.get(3 /*width*/)) * capture.get(4 /*height*/);
        resize(frame, frame, Size(target_width, height));

        if (controlsWindow->getBlurDeviation() > 0) {
            GaussianBlur(frame, frame, Size(GAUSSIAN_KERNEL, GAUSSIAN_KERNEL), controlsWindow->getBlurDeviation());
        }

        //Apply brightness and contrast
        frame.convertTo(frame, -1, controlsWindow->getContrast(), controlsWindow->getBrightness());

        Mat maskedImage = thresholdImage(controlsWindow, frame);

        Mat erodedImage = erodeDilate(maskedImage, controlsWindow);

        Mat erodedImageBinary;

        cvtColor(erodedImage, erodedImageBinary, COLOR_BGR2GRAY);
        threshold(erodedImageBinary, erodedImageBinary, 0, 255, CV_THRESH_BINARY);

        if(controlsWindow->getInvert()) {
            erodedImageBinary = 255 - erodedImageBinary;
        }

        // Blob detection tuned for large bright regions only.
        cv::SimpleBlobDetector::Params params;
        params.minDistBetweenBlobs = 50.0f;
        params.filterByInertia = false;
        params.filterByConvexity = false;
        params.filterByColor = true;
        params.filterByCircularity = false;
        params.filterByArea = true;
        params.minArea = 1000.0f;
        params.maxArea = 100000.0f;
        params.blobColor = 255;

        vector<KeyPoint> centers;
        vector<vector<Point>> contours;
        ModBlobDetector* blobDetector = new ModBlobDetector(params);

        vector<vector<Point>> contourHulls;
        vector<RotatedRect> contourRects;
        blobDetector->findBlobs(erodedImageBinary, erodedImageBinary, centers, contours);
        for(vector<Point> ctpts : contours) {
            vector<Point> hull;
            convexHull(ctpts, hull);
            contourHulls.push_back(hull);
            contourRects.push_back(minAreaRect(hull));
        }
#ifdef DEBUG_BLOBS
        drawContours(frame, contours, -1, Scalar(128,255,128), 2, CV_AA);
        drawContours(frame, contourHulls, -1, Scalar(255, 128,0), 2, CV_AA);
        int ptnum = 0;  // fix: was uninitialised before the ++ below
        for(KeyPoint pt : centers) {
            Scalar color(255, 0, 255);
            circle(frame, pt.pt, 5
                   , color, -1 /*filled*/, CV_AA);
            circle(frame, pt.pt, pt.size, color, 1, CV_AA);
            ptnum++;
        }
#endif
        // Examine each blob's minimal rotated rect; the first whose aspect
        // ratio is within 20% of the configured object counts as the tape.
        for(RotatedRect rr : contourRects) {
            Point2f points[4];
            rr.points(points);
            float side1 = distance(points[0], points[1]);
            float side2 = distance(points[1], points[2]);

            float shortestSide = min(side1, side2);
            float longestSide = max(side1, side2);
            float aspectRatio = longestSide/shortestSide;
            int b = 0;
            bool isTape = objInfo.aspectRatio == 0 ? false :
                          abs(objInfo.aspectRatio - aspectRatio) < 0.2*objInfo.aspectRatio;
            /*
             * TODO
             * Make a list of possible tape candidates
             * Use tape candidate with smallest difference in ratio to the real ratio as the tape
             */
            if(isTape) {
                b = 255;
                string widthText = "Width (px): ";
                widthText.append(toString(longestSide));
                string heightText = "Height (px): ";
                heightText.append(toString(shortestSide));
                string rotText = "Rotation (deg): ";
                rotText.append(toString(abs((int)rr.angle)));
                string distText;
                if(camSettings.focalLength == -1) {
                    distText = "Focal length not defined";
                } else {
                    // Pinhole model: distance = realWidth * focalLength / pixelWidth.
                    float dist = objInfo.width * camSettings.focalLength / longestSide;
                    distText = "Distance (cm): ";
                    distText.append(toString(dist));
                }
                putText(frame, widthText, Point(0, 20), CV_FONT_HERSHEY_SIMPLEX, 0.5f, Scalar(0, 255, 255));
                putText(frame, heightText, Point(0, 40), CV_FONT_HERSHEY_SIMPLEX, 0.5f, Scalar(0, 255, 255));
                putText(frame, rotText, Point(0, 60), CV_FONT_HERSHEY_SIMPLEX, 0.5f, Scalar(0, 255, 255));
                putText(frame, distText, Point(0, 80), CV_FONT_HERSHEY_SIMPLEX, 0.5f, Scalar(0, 255, 255));
            }

            rotated_rect(frame, rr, Scalar(b, 0, 255));
            if(isTape)break;
        }
        if(objInfo.aspectRatio == 0) {
            putText(frame, "Invalid object info (object.xml)", Point(0, 20), CV_FONT_HERSHEY_SIMPLEX, 0.5f, Scalar(0, 255, 255));
        }
        delete blobDetector;

        imshow(ERODE_PREVIEW_WIN_NAME, erodedImageBinary);

        imshow(VIDEO_WINDOW_NAME, frame);

        //int waitTime = max((int)(((1.0/framerate)*1000)
        //                   - ((int)(getTickCount()/getTickFrequency())*1000 - captureTime))
        //                   , 1);
        char key = (char)waitKey(1);
        switch (key) {
        case 'q':
        case 'Q':
        case 27: //escape
            saveSettings(controlsWindow, (char*)preferenceFileName);
            delete(controlsWindow);  // fix: was leaked on this early return
            return 0;
        default:
            break;
        }
        std::this_thread::yield();
    }

    // Capture ended (empty frame): persist settings and clean up.
    saveSettings(controlsWindow, (char*)preferenceFileName);
    delete(controlsWindow);

    destroyAllWindows();
    return 0;
}
int main(int argc, char *argv[])
{
  /// Read Images
  std::vector<cv::Mat> images = read_images();

  for (size_t i = 1; i < images.size(); i++)
  {
    if (images[0].rows != images[i].rows || images[0].cols != images[i].cols)
    {
      std::cout << "ERROR: Images need to be of the same size\n";
      return -1;
    }
  }

  /// Reshape Image
  int number_of_pixels = 0;
  for (int i = 0; i < SLICES; i++)
  {
    number_of_pixels += images[i].rows * images[i].cols;
  }
  cv::Mat pixel_values;
  for (int i = 0; i < SLICES; i++)
  {
    cv::Mat temp;
    images[i].reshape(1, images[i].rows * images[i].cols).copyTo(temp);
    pixel_values.push_back(temp);
  }
  pixel_values.convertTo(pixel_values, CV_32F);

  /// Do k-means
  cv::Mat bestLabels;
  cv::kmeans(pixel_values, K, bestLabels, cv::TermCriteria(), 10,
   cv::KMEANS_RANDOM_CENTERS);

  std::vector<cv::Mat> clustered_images;

  /// bestLabels, contains the number of the cluster to which each pixel belongs
  int so_far = 0;
  for (int i = 0; i < SLICES; i++)
  {
    cv::Mat temp;
    bestLabels.rowRange(so_far, so_far + images[i].rows * images[i].cols)
     .copyTo(temp);
    so_far += images[i].rows * images[i].cols;

    // cv::Mat new_temp;
    temp = temp.reshape(1, images[i].rows);
    clustered_images.push_back(temp);
  }
  std::vector<cv::Rect> blobs;
  std::vector<int> blob_count;
  std::vector<int> image_count;
  three_d_connected_components(clustered_images, blobs, image_count,
                               blob_count);

  for (size_t i = 0; i < blob_count.size(); i++)
  {
    blob_count[i] -= K;
  }

  /// Show images
  for (int i = 0; i < SLICES; i++)
  {
    /// Draw rectangles on image
    for (size_t r = 0; r < image_count.size(); r++)
    {
      if (image_count[r] == i)
      {
        rectangle(images[i], blobs[r], cv::Scalar(0, 0, 0), 1);
        std::ostringstream txt;
        txt << blob_count[r];
        cv::Point origin(blobs[r].x + blobs[r].width / 2,
                         blobs[r].y + blobs[r].height / 2);
        putText(images[i], txt.str().c_str(), origin,
                cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 0, 0), 1);
      }
    }
    std::ostringstream name1;
    name1 << "Original " << i + 1;
    cv::namedWindow(name1.str().c_str(), 1);
    cv::imshow(name1.str().c_str(), images[i]);
  }
  cv::waitKey(0);
  return 0;
}
// Composites the heads-up display over a video frame and returns the result.
// The HUD bitmap is chosen from the current game/pickup state, scaled to the
// window, blended over the feed, annotated with telemetry text, and finally
// alpha-mixed with a crosshair-marked copy of the raw frame.
Mat HUD(Mat videoFeed, int sizex, int sizey) {
	// Pick the HUD overlay matching the current game state.
	// NOTE(review): this re-reads a PNG from disk on every frame — consider caching.
	const char* hudPath = "..\\..\\src\\resource\\hud.png";
	if (gameOn) {
		if (!peoplePicked && !cratePicked)
			hudPath = "..\\..\\src\\resource\\hudg.png";
		else if (!peoplePicked && cratePicked)
			hudPath = "..\\..\\src\\resource\\hudc.png";
		else if (peoplePicked && !cratePicked)
			hudPath = "..\\..\\src\\resource\\hudp.png";
		else
			hudPath = "..\\..\\src\\resource\\hudpc.png";
	}
	Mat image = imread(hudPath, -1);

	// Keep an untouched copy of the feed for the final opacity blend.
	Mat buffer;
	videoFeed.copyTo(buffer);

	// Scale the HUD bitmap to the window dimensions.
	Mat rImage;
	resize(image, rImage, Size(sizex, sizey), 0, 0, INTER_CUBIC);

	// Overlay the scaled HUD onto the live feed.
	Mat result;
	OverlayImage(videoFeed, rImage, result, Point(0, 0));

	// Crosshair at the centre of the buffered copy.
	Point2f point(sizex / 2, sizey / 2);
	circle(buffer, point, 10, Scalar(255, 255, 0), -1);

	// Telemetry text rendered on top of the HUD.
	ostringstream absoluteStream;
	ostringstream batteryStream;
	ostringstream altitudeStream;
	ostringstream pointsStream;
	ostringstream timeStream;

	// Absolute control flag
	absoluteStream << "Absolute control : " << absoluteControl;
	putText(result, absoluteStream.str(), Point(10, 90), CV_FONT_HERSHEY_PLAIN, 1.2, CV_RGB(0, 250, 0));

	// Battery percentage
	batteryStream << ardrone.getBatteryPercentage();
	putText(result, batteryStream.str(), Point(180, 33), CV_FONT_HERSHEY_PLAIN, 2, CV_RGB(0, 250, 0), 2);

	// Altitude
	altitudeStream << ardrone.getAltitude();
	putText(result, altitudeStream.str(), Point(440, 33), CV_FONT_HERSHEY_PLAIN, 2, CV_RGB(0, 250, 0), 2);

	// Game telemetry is only shown while a game is running.
	if (gameOn) {
		// Score
		pointsStream << points;
		putText(result, pointsStream.str(), Point(WIDTH / 2.1, 45), CV_FONT_HERSHEY_PLAIN, 1, CV_RGB(0, 250, 0), 2);

		// Remaining game time.
		// NOTE(review): cvGetTickCount() returns a 64-bit count; storing it in
		// an int truncates — confirm gameTimeStart uses the same convention
		// before widening either side.
		int now = cvGetTickCount();
		int passedSinceStart = ((now - gameTimeStart) / (cvGetTickFrequency() * 1000)) / 1000;
		gameTimeLeft = TIME_LIMIT - passedSinceStart;
		timeStream << gameTimeLeft;
		if (isDroneConnected)
			putText(result, timeStream.str(), Point(WIDTH / 2.3, HEIGHT - 25), CV_FONT_HERSHEY_PLAIN, 2, CV_RGB(0, 250, 0), 2);
		else
			putText(result, timeStream.str(), Point(WIDTH / 2.2, WEBCAM_HEIGHT - 20), CV_FONT_HERSHEY_PLAIN, 2, CV_RGB(0, 250, 0), 2);
	}

	// Mix the crosshair-marked raw frame back in at 20% opacity.
	double opacity = 0.2;
	addWeighted(buffer, opacity, result, 1 - opacity, 0, result);

	return result;
}
Example #21
0
// Segments MatImage into home-team, opponent-team and ball colored blobs via
// two-pass connected-component labeling, computes each blob's centroid, and
// annotates MatImage with a rectangle + label for the ball and each opponent.
// Relies on member state: row/col loop indices, label[][]/color[][] maps,
// eq_set (label equivalences), luas_area (blob pixel counts), locatex/locatey
// (coordinate sums for centroids), label2color, thresholdval, the reference
// colors, and the output point containers (assumed prepared by the caller).
void CImageProc::teamAndBallIdentify( void ) {
	// Clear column 0 so the top/left neighbor checks at col==1 see background.
	for (row=1; row < IMAGE_HEIGHT; row++){
		label[0][row] = 0;
		color[0][row] = 0;
	}

	// Clear row 0 likewise for the checks at row==1.
	for (col=0; col < IMAGE_WIDTH; col++){
		label[col][0] = 0;
		color[col][0] = 0;
	}

	// Reset per-frame accumulators; eq_set[0] == 0 is the background label.
	numcolor = 0;
	eq_set.clear();
	eq_set.push_back(0);
	luas_area.clear();
	locatex.clear();
	locatey.clear();
	label2color.clear();

	// Pass 1: classify each pixel by nearest reference color, then assign a
	// provisional component label from the top/left neighbors.
	for (row=1; row < IMAGE_HEIGHT; row++){
		for (col=1; col < IMAGE_WIDTH; col++){
			if		 (colorDistance(MatImage.at<cv::Vec3b>(row, col), homeTeamColor) < thresholdval) {
				color[col][row] = 1;	// home team
			}else /**/if (colorDistance(MatImage.at<cv::Vec3b>(row, col), oppoTeamColor) < thresholdval) {
				color[col][row] = 2;	// opponent team
			} else /**/if (colorDistance(MatImage.at<cv::Vec3b>(row, col), ballColor) < thresholdval) {
				color[col][row] = 3;	// ball
			} else {
				label[col][row] = 0;	// background
				color[col][row] = 0;
			}
			
			if (color[col][row] != 0){
				// Only the top neighbor has the same color: inherit its label.
				if		 (color[col][row-1] == color[col][row] && color[col-1][row] != color[col][row]){
					label[col][row] = label[col][row-1];
				// Only the left neighbor matches: inherit its label.
				} else if(color[col-1][row] == color[col][row] && color[col][row-1] != color[col][row]){
					label[col][row] = label[col-1][row];
				// Both neighbors match: take the top label and record that the
				// left label is equivalent to it.
				} else if(color[col-1][row]   == color[col][row] && color[col][row-1]   == color[col][row]){
					label[col][row] = label[col][row-1];
					if (label[col-1][row] != label[col][row]){
						// NOTE(review): this records only a single level of
						// equivalence (no full union-find resolution); long
						// chains of merged labels may not fully collapse in
						// pass 2 — verify on scenes with U-shaped blobs.
						eq_set[label[col-1][row]] = eq_set[label[col][row]];
					}
				// Neither neighbor matches: start a new component.
				} else {
					numcolor++;
					label[col][row] = numcolor;
					eq_set.push_back(numcolor);
				}
			} /**/
		}
	} 

	// Pass 2: substitute equivalent labels, then accumulate per-blob area and
	// coordinate sums for centroid computation.
	for (row=0; row < IMAGE_HEIGHT; row++){
		for (col=0; col < IMAGE_WIDTH; col++){
			if(eq_set[label[col][row]] != label[col][row])
				label[col][row] = eq_set[label[col][row]];

			if(label[col][row] != 0){
				label2color[label[col][row]] = color[col][row];
				luas_area[label[col][row]]++;
				locatex[label[col][row]] += col;
				locatey[label[col][row]] += row;
			}
		}
	} 

	// Route every blob of at least 100 pixels to its color class, using the
	// centroid (coordinate sum / area) as the blob position.
	for(it = luas_area.begin(); it != luas_area.end() ; it++){
		if((*it).second < 100)
			continue;

		cv::Point2i thisPoint;
		thisPoint.x = locatex[(*it).first]/(*it).second;
		thisPoint.y = locatey[(*it).first]/(*it).second;

		if(label2color[(*it).first] == 1){
			homeTeamColorPoint.push_back(thisPoint);
		}else if(label2color[(*it).first] == 2)
			oppoTeamColorPoint.push_back(thisPoint);
		else 
			ballColorPoint = thisPoint;	// color class 3 (ball)
	}

	// Draw the ball marker and its label.
	cout<< "Ball Location : " 
		<< ballColorPoint.x << "," 
		<< ballColorPoint.y << endl;
	rectangle(MatImage, 
			  cv::Point(ballColorPoint.x-10, ballColorPoint.y-10), 
			  cv::Point(ballColorPoint.x+10, ballColorPoint.y+10), 
			  cv::Scalar(255,255,0,0));
	//text = "Ball";
	sprintf_s( text, "Ball" );
	putText(MatImage, text , cv::Point(ballColorPoint.x, ballColorPoint.y), cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cv::Scalar(255,255,255));

	// Home-team drawing intentionally kept disabled.
	//for(int i=0; i<homeTeamColorPoint.size(); i++){
	//	//cout<< "Team " << i+1 <<" Location : " 
	//	//	<< homeTeamColorPoint[i].x << "," 
	//	//	<< homeTeamColorPoint[i].y << endl;
	//	//rectangle(MatImage, 
	//	//	  cv::Point(homeTeamColorPoint[i].x-2, homeTeamColorPoint[i].y-2), 
	//	//	  cv::Point(homeTeamColorPoint[i].x+2, homeTeamColorPoint[i].y+2), 
	//	//	  cv::Scalar(255,255,0,0));
	//	//sprintf_s( id, "T%d", (i+1));
	//	//putText(MatImage, id , cv::Point(homeTeamColorPoint[i].x, homeTeamColorPoint[i].y), cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cv::Scalar(255,255,255));
	//}

	// Draw a marker and an "O<n>" label for each detected opponent.
	for(int i=0; i<oppoTeamColorPoint.size(); i++){
		//cout<< "Opponent " << i+1 <<" Location : " 
		//	<< oppoTeamColorPoint[i].x << "," 
		//	<< oppoTeamColorPoint[i].y << endl;
		rectangle(MatImage, 
			  cv::Point(oppoTeamColorPoint[i].x-10, oppoTeamColorPoint[i].y-10), 
			  cv::Point(oppoTeamColorPoint[i].x+10, oppoTeamColorPoint[i].y+10), 
			  cv::Scalar(255,255,0,0));
		sprintf_s( text, "O%d", (i+1));
		putText(MatImage, text , cv::Point(oppoTeamColorPoint[i].x, oppoTeamColorPoint[i].y), cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cv::Scalar(255,255,255));
	}
}
Example #22
0
/**
 * Renders labeled coordinate triples as rows of text.
 *
 * @param plot Target image. When NULL, a fresh canvas is created and the
 *             result is shown in its own "Coordinates" window.
 * @param coordinates The values to display, one Point3f per row.
 * @param labels One label per coordinate. The first character selects the
 *               row layout ('D' = single distance value, 'O' = roll/pitch/yaw,
 *               anything else = x/y/z); the text after the two-character
 *               prefix is what gets printed as the row caption.
 */
void WorldPlotter::plotCoordinates(Mat *plot, Vector<Point3f> &coordinates,
                                   vector<string> &labels) {
    const int rows = coordinates.size();
    Mat ownCanvas;
    bool showWindow = false;

    // No destination supplied: draw on a private canvas and display it below.
    if (plot == NULL) {
        showWindow = true;
        ownCanvas = Mat::zeros(rows * 15 + 10, plot_size_x, CV_8UC3);
        plot = &ownCanvas;
    }

    const int precision = 2;
    const int width     = 10;

    // Formats one "<prefix><name>: <value>" cell with fixed precision and a
    // right-aligned value field.
    auto makeCell = [&](const char *prefix, const String &name, float value) {
        stringstream cell;
        cell.precision(precision);
        cell.setf(ios::fixed, ios::floatfield);
        cell << prefix << name << ": ";
        cell.width(width);
        cell << right << value;
        return cell.str();
    };

    for (int i = 0; i < rows; ++i) {
        // Skip the two-character prefix (e.g. "P.", "O.", "D.") for display.
        String caption = &labels.at(i)[2];

        stringstream captionStream;
        captionStream << left << caption.c_str() << right;

        String xName, yName, zName;
        float step;
        const char kind = labels.at(i)[0];

        if (kind == 'D') {            // distance row: one value, full width
            xName = "D";
            yName = " ";
            zName = " ";
            step = (float)(plot_size_x - 120) / 1.0f;
        } else if (kind == 'O') {     // orientation row: roll/pitch/yaw
            xName = "r";
            yName = "p";
            zName = "y";
            step = (float)(plot_size_x - 120) / 3.0f;
        } else {                      // position row: x/y/z
            xName = "x";
            yName = "y";
            zName = "z";
            step = (float)(plot_size_x - 120) / 3.0f;
        }

        const int baseline = 15 * (i + 1);

        putText(*plot, captionStream.str(), Point2i(10, baseline),
                FONT_HERSHEY_PLAIN, 1, text_color);

        putText(*plot, makeCell("> ", xName, coordinates[i].x),
                Point2i(120, baseline), FONT_HERSHEY_PLAIN, 1, text_color);

        // Distance rows carry a single component only.
        if (kind == 'D')
            continue;

        putText(*plot, makeCell(" ", yName, coordinates[i].y),
                Point2i(120 + step, baseline), FONT_HERSHEY_PLAIN, 1,
                text_color);

        putText(*plot, makeCell(" ", zName, coordinates[i].z),
                Point2i(120 + 2 * step, baseline), FONT_HERSHEY_PLAIN, 1,
                text_color);
    }

    if (showWindow) {
        namedWindow("Coordinates");
        imshow("Coordinates", *plot);
    }
}
/*****************************************************************************
 // This applies a brute force match without a trained datastructure.
 // It calculates the two nearest neighbors per descriptor and refines the
 // result with a ratio test, a symmetry test, and an epipolar (RANSAC) test.
 // @param query_image: the input image
 // @param matches_out: a pointer that stores the output matches. It is
 //                     necessary for pose estimation.
 // @return index of the best-matching reference feature map, or -1 when no
 //         descriptors could be extracted or the reference database is empty.
 */
int brute_force_match(cv::Mat& query_image,  std::vector< cv::DMatch> * matches_out)
{
    // variables that keep the query keypoints and query descriptors
    std::vector<cv::KeyPoint>           keypointsQuery;
    cv::Mat                             descriptorQuery;
    
    // knn-match result against the best reference map found so far
    std::vector< std::vector< cv::DMatch> > matches1;
    
    //////////////////////////////////////////////////////////////////////
    // 1. Detect the keypoints in the query image
    _detector->detect(query_image, keypointsQuery);
    
    // If keypoints were found, descriptors are extracted.
    if(keypointsQuery.size() > 0)
    {
        _extractor->compute( query_image, keypointsQuery, descriptorQuery);
    }
    
#ifdef DEBUG_OUT
    std::cout << "Found " << descriptorQuery.size() << " feature descriptors in the image."  << std::endl;
#endif
    
    // Guard: without query descriptors or reference maps there is nothing to
    // match. Previously max_id could remain -1 and was then used to index
    // _descriptorsRefDB, which is undefined behavior.
    if (descriptorQuery.empty() || _descriptorsRefDB.empty())
    {
        matches_out->clear();
        query_image.release();
        return -1;
    }
    
    //////////////////////////////////////////////////////////////////////////////
    // 2. Match the query descriptors against every descriptor set in the
    // database with k-nearest neighbors, k=2.
    
    int max_removed = INT_MAX;
    int max_id = -1;
    
    for(size_t i=0; i<_descriptorsRefDB.size(); i++)
    {
        std::vector< std::vector< cv::DMatch> > matches_temp1;
        
        // Match all query descriptors against the descriptors of map i.
        _brute_force_matcher.knnMatch( descriptorQuery, _descriptorsRefDB[i],  matches_temp1, 2);
        
        ///////////////////////////////////////////////////////
        // 3. Refinement; Ratio test
        // The ratio test only accepts matches which are clear without
        // ambiguity: the best hit must be clearly closer than the second hit.
        int removed = ratioTest(matches_temp1);
        
        // Keep the map with the fewest removed features, i.e. the highest
        // number of unambiguous hits.
        if(removed < max_removed)
        {
            max_removed = removed;
            max_id = (int)i;
            matches1 = matches_temp1;
        }
    }
    
#ifdef DEBUG_OUT
    std::cout << "Feature map number " << max_id << " has the highest hit with "<< matches1.size() -  max_removed << " descriptors." << std::endl;
#endif
    
    ///////////////////////////////////////////////////////
    // 4./5. Match in the opposite direction (database -> query) and apply the
    // ratio test again; both directions are needed for the symmetry test.
    std::vector< std::vector< cv::DMatch> > matches_temp2;
    _brute_force_matcher.knnMatch(_descriptorsRefDB[max_id],  descriptorQuery,  matches_temp2, 2);
    ratioTest(matches_temp2);
    
    ///////////////////////////////////////////////////////
    // 6. Refinement; Symmetry test
    // We only accept matches which appear in both knn-match directions;
    // a match found in only one direction is tossed.
    std::vector<cv::DMatch> symMatches;
    symmetryTest(  matches1, matches_temp2, symMatches);
#ifdef DEBUG_OUT
    std::cout << "Kept " << symMatches.size() << " matches after symmetry test."  << std::endl;
#endif
    
    ///////////////////////////////////////////////////////
    // 7. Refinement; Epipolar constraint via RANSAC.
    // matches_out is cleared unconditionally so that a failed symmetry test
    // cannot leave stale matches from a previous call behind (they would
    // index into the wrong keypoint sets when drawn below).
    matches_out->clear();
    if(symMatches.size() > 25)
    {
        ransacTest( symMatches,  _keypointsRefDB[max_id], keypointsQuery, *matches_out);
    }
    
#ifdef DEBUG_OUT
    std::cout << "Kept " << matches_out->size() << " matches after RANSAC test."  << std::endl;
#endif
    
    ///////////////////////////////////////////////////////
    // 8. Draw the result on screen.
    cv::Mat out;
    cv::drawMatches(feature_map_database[max_id]._ref_image , _keypointsRefDB[max_id], query_image, keypointsQuery, *matches_out, out, cv::Scalar(255,255,255), cv::Scalar(0,0,255));
    
    std::string num_matches_str;
    std::strstream conv;
    conv << matches_out->size();
    conv >> num_matches_str;
    
    std::string text;
    text.append( num_matches_str);
    text.append("( " + _num_ref_features_in_db_str + " total)");
    text.append(" matches were found in reference image ");
    text.append( feature_map_database[max_id]._ref_image_str);
    
    putText(out, text, cvPoint(20,20),
            cv::FONT_HERSHEY_COMPLEX_SMALL, 1.0, cvScalar(0,255,255), 1, CV_AA);
    
    cv::imshow("result", out);
    if (run_video) cv::waitKey(1);
    else cv::waitKey();
    
    // Release the working images.
    query_image.release();
    out.release();
    
    return max_id;
}
Example #24
0
/**
 * This is the function that plots the helicopter AND the object of interest,
 * along with its normal, onto a top-view canvas shown in "Top View Plot".
 * Also appends both positions to their persistent traces and forwards a
 * summary (positions, orientation, distance, goal yaw) to plotCoordinates.
 *
 * @param objectPosition Position of an object (divided by 1000 before the
 *                       coordinate readout — presumably mm -> m; confirm).
 * @param objectNormal Normal of an object.
 * @param quadPosition Position of the quad.
 * @param quadOrientation Roll, Pitch, and Yaw of the quad.
 */
void WorldPlotter::plotTopView(Point3f objectPosition, Point3f objectNormal,
                               Point3f quadPosition, Point3f quadOrientation) {

    // Fresh black canvas every call; axes are drawn first.
    Mat plot = Mat::zeros(plot_size_y, plot_size_x, CV_8UC3);

    plotAxes(plot);

    // Plot Normal Vector
    Point2i object_normal_p1, object_normal_p2;

    // Map object position to pixel coordinates (the /1000.0f matches the
    // objectPosition *= 0.001 scaling further down — same unit conversion).
    object_normal_p1.x = objectPosition.x / real_size_x * plot_size_x / 1000.0f
                         + plot_size_x / 2;
    object_normal_p1.y = objectPosition.y / real_size_y * plot_size_y / 1000.0f
                         + plot_size_y / 2;

    // Normal drawn as a 25px segment pointing against the normal direction.
    object_normal_p2.x = object_normal_p1.x - 25 * objectNormal.x;
    object_normal_p2.y = object_normal_p1.y - 25 * objectNormal.y;

    object_trace.push_back(object_normal_p1);
    plotTrace(plot, object_trace, object_color);

    line(plot, object_normal_p1, object_normal_p2, normal_color,
         normal_thickness);

    rectangle(plot,
              Point2i(object_normal_p1.x - object_size,
                      object_normal_p1.y - object_size),
              Point2i(object_normal_p1.x + object_size,
                      object_normal_p1.y + object_size),
              object_color, object_thickness);

    float x, y, z;
    x = quadPosition.x;
    y = quadPosition.y;
    z = quadPosition.z;

    // NOTE(review): roll and pitch are unpacked but only yaw is used below.
    float roll, pitch, yaw;
    roll  = quadOrientation.x;
    pitch = quadOrientation.y;
    yaw   = quadOrientation.z;

    // Heading indicator: a point 0.1 units ahead of the quad along its yaw.
    float q_x, q_y;

    q_x = (x + 0.1 * cos(yaw)) / real_size_x * plot_size_x
          + plot_size_x / 2;
    q_y = (y + 0.1* sin(yaw)) / real_size_y * plot_size_y
          + plot_size_y / 2;

    // Quad position in pixel coordinates (no /1000 here — the quad position
    // is presumably already in meters; confirm against the caller).
    x = x / real_size_x * plot_size_x + plot_size_x / 2;
    y = y / real_size_y * plot_size_y + plot_size_y / 2;

    quad_trace.push_back(Point2i(x, y));
    plotTrace(plot, quad_trace, quad_color);

    rectangle(plot, Point2i(x - object_size, y - object_size),
              Point2i(x + object_size, y + object_size), quad_color,
              object_thickness);

    rectangle(plot, Point2i(q_x - 1, q_y - 1), Point2i(q_x + 1, q_y + 1),
              quad_color, object_thickness);

    // Line from the quad to its heading indicator.
    line(plot, Point2i(x, y), Point2i(q_x, q_y), normal_color, normal_thickness);

    putText(plot, "Object", Point2i(object_normal_p1.x, object_normal_p1.y - 10),
            FONT_HERSHEY_PLAIN, 1, text_color);

    putText(plot, "Quad", Point2i(x, y - 10), FONT_HERSHEY_PLAIN, 1, text_color);

    putText(plot, "iron curtain", Point2i(plot_size_x / 2 - 37, plot_size_y - 9),
            FONT_HERSHEY_PLAIN, 1, Scalar(0, 0, 255));

    // Bring the object position into the same unit as the quad position.
    objectPosition *= 0.001;

    // Euclidean quad->object distance, reported in the x component only.
    Vec3f d = cv::Vec3f(objectPosition) - cv::Vec3f(quadPosition);
    Point3f distance = cv::Point3f(sqrt(d[0] * d[0] + d[1] * d[1] + d[2] * d[2]),
                                   0, 0);

    // Yaw the quad should take to face along the object normal.
    Point3f new_yaw = cv::Point3f(0, 0, arcTan(objectNormal.x,
                                  objectNormal.y));

    // "Golden point": target hover position offset from the object along its
    // (normalized) normal by |scale| units.
    float normalization = sqrt(objectNormal.x * objectNormal.x +
                               objectNormal.y * objectNormal.y);
    float scale = -1.5;
    float golden_x = objectPosition.x + scale * objectNormal.x / normalization;
    float golden_y = objectPosition.y + scale * objectNormal.y / normalization;

    //printf("%f   %f \n", golden_x, golden_y);

    golden_x = golden_x / real_size_x * plot_size_x + plot_size_x / 2;
    golden_y = golden_y / real_size_y * plot_size_y + plot_size_y / 2;

    cv::Point2i goldenPoint = cv::Point2i(golden_x, golden_y);
    cv::circle(plot, goldenPoint, 2, Scalar(0, 0, 255), 2);

    // Textual readout rendered by plotCoordinates in its own window; the
    // label prefixes (P./O./D.) select each row's layout.
    cv::Vector<Point3f> coordinates;
    vector<string> labels;

    coordinates.push_back(objectPosition);
    labels.push_back("P.Object pos.");
    coordinates.push_back(quadPosition);
    labels.push_back("P.Quad pos.");
    coordinates.push_back(quadOrientation);
    labels.push_back("O.Quad orient.");
    coordinates.push_back(distance);
    labels.push_back("D.Distance");
    coordinates.push_back(new_yaw);
    labels.push_back("P.Goal Yaw");

    plotCoordinates(NULL, coordinates, labels);
    outputPlot = plot;
    namedWindow("Top View Plot");
    imshow("Top View Plot", plot);
}
Example #25
0
// Convenience wrapper around putText that draws `text` at (x, y); the final
// `true` argument presumably selects monospace rendering, per the method
// name — confirm against putText's declaration.
void Dashboard::putMonoText(int x, int y, const char *text, int size, SDLColor color)
{
    putText(x, y, text, size, color, true);
}
Example #26
0
// Runs the Kalman tracker over this frame's detections, draws each tracked
// object's rectangle and id onto `image`, and publishes the tracked set as a
// cv_tracker::image_obj_tracked ROS message via publish_if_possible().
// `kstates`/`active` carry tracker state across frames; `colors` supplies
// per-track drawing colors. `image_source` is currently unused (its header
// assignment is commented out below in favor of image_objects_header).
void trackAndDrawObjects(cv::Mat& image, int frameNumber, std::vector<cv::LatentSvmDetector::ObjectDetection> detections,
			 std::vector<kstate>& kstates, std::vector<bool>& active,
			 std::vector<cv::Scalar> colors, const sensor_msgs::Image& image_source)
{
	std::vector<kstate> tracked_detections;

	// Timing instrumentation kept around the tracking call (output disabled).
	cv::TickMeter tm;
	tm.start();
	//std::cout << "START tracking...";
	doTracking(detections, frameNumber, kstates, active, image, tracked_detections, colors);
	tm.stop();
	//std::cout << "END Tracking time = " << tm.getTimeSec() << " sec" << std::endl;

	//ROS
	// Per-object message fields, collected in parallel arrays.
	int num = tracked_detections.size();
	std::vector<cv_tracker::image_rect_ranged> rect_ranged_array;
	std::vector<int> real_data(num,0);
	std::vector<int> obj_id(num, 0);
	std::vector<int> lifespan(num, 0);
	//ENDROS

	for (size_t i = 0; i < tracked_detections.size(); i++)
	{
		kstate od = tracked_detections[i];
		cv_tracker::image_rect_ranged rect_ranged_;

		//od.rect contains x,y, width, height
		// Draw the tracked box and its id label on the frame.
		rectangle(image, od.pos, od.color, 3);
		putText(image, SSTR(od.id), cv::Point(od.pos.x + 4, od.pos.y + 13), cv::FONT_HERSHEY_SIMPLEX, 0.55, od.color, 2);
		//ROS
		obj_id[i] = od.id; // ?
		rect_ranged_.rect.x	= od.pos.x;
		rect_ranged_.rect.y	= od.pos.y;
		rect_ranged_.rect.width	= od.pos.width;
		rect_ranged_.rect.height = od.pos.height;
		rect_ranged_.range	= od.range;
		rect_ranged_.min_height	= od.min_height;
		rect_ranged_.max_height	= od.max_height;

		rect_ranged_array.push_back(rect_ranged_);

		real_data[i] = od.real_data;
		lifespan[i] = od.lifespan;
		//ENDROS
	}
	//more ros
	// Assemble the outgoing message from the parallel arrays.
	cv_tracker::image_obj_tracked kf_objects_msg;

	kf_objects_msg.type = object_type;
	kf_objects_msg.total_num = num;
	copy(rect_ranged_array.begin(), rect_ranged_array.end(), back_inserter(kf_objects_msg.rect_ranged)); // copy vector
	copy(real_data.begin(), real_data.end(), back_inserter(kf_objects_msg.real_data)); // copy vector
	copy(obj_id.begin(), obj_id.end(), back_inserter(kf_objects_msg.obj_id)); // copy vector
	copy(lifespan.begin(), lifespan.end(), back_inserter(kf_objects_msg.lifespan)); // copy vector

//	kf_objects_msg_.header = image_source.header;
	// Reuse the header of the incoming image_obj message, not the raw image.
	kf_objects_msg.header = image_objects_header;
	kf_objects_msg_ = kf_objects_msg;; // NOTE(review): stray duplicate ';'
	// Mark the track result ready before attempting to publish.
	track_ready_ = true;
	publish_if_possible();

	//cout << "."<< endl;
}
    /** \brief Draw track information on the image

      Go through each vertex. Draw it as a circle (colored by tracking age in
      inactive mode, or by connected-component id in active mode), optionally
      write its id/world coordinates next to it, and draw a line to every
      adjacent vertex whose connecting edge is active.

      */
    void FeatureGrouperVisualizer::Draw(){
        TracksConnectionGraph::vertex_iterator vi, viend;
        TracksConnectionGraph::out_edge_iterator ei, eiend;
        TracksConnectionGraph::vertex_descriptor other_v;

        TracksConnectionGraph & graph = feature_grouper_->tracks_connection_graph_;

        cv::Point2f position_in_image,
            position_in_image2,
            position_in_world,
            position_to_draw;
        // NOTE(review): `color` is only assigned when is_draw_inactive is set
        // or the vertex is activated; otherwise the edge-drawing loop below
        // uses it uninitialized (or stale from a previous vertex) — verify.
        CvScalar color;

        char position_text[256];
        for (tie(vi, viend) = vertices(graph); vi != viend; ++vi ){
            // Convert position to image coordinate
            position_in_world = (graph)[*vi].pos;
            convert_to_image_coordinate(position_in_world, homography_matrix_, &position_in_image);

            TracksConnectionGraph::vertices_size_type num_components;
//            std::vector<TracksConnectionGraph::vertices_size_type> connected_components_map = feature_grouper_->GetConnectedComponentsMap(num_components);

            if (is_draw_inactive){
                // Draw this track with only two colors (blue for those tracked for a long time, red otherwise)
                if(graph[*vi].previous_displacements.size() >= feature_grouper_->maximum_previous_points_remembered_){
                    color = CV_RGB(0,0,255);
                } else {
                    color = CV_RGB(255,0,0);
                }
                circle(image_, position_in_image, 1, color);
            } else {
                if (graph[*vi].activated){
                    // color this vertex based on its assigned component_id
                    color = ColorPallete::colors[graph[*vi].component_id % ColorPallete::NUM_COLORS_IN_PALLETE];

                    circle(image_, position_in_image, 1, color);
                }
            }

            // Write Text Information for this track: "<id>(x, y)" in world units
            if (is_draw_coordinate){
                sprintf(position_text, "%d(%5.1f,%5.1f)", (graph)[*vi].id, position_in_world.x, position_in_world.y);
                position_to_draw.x = position_in_image.x + 5;
                position_to_draw.y = position_in_image.y + 5;
                putText(image_, position_text, position_to_draw, FONT_HERSHEY_PLAIN, 0.4, CV_RGB(128,128,0));
            }

            // Draw lines to adjacent vertices (if the edge is active)
            for (tie(ei, eiend) = out_edges(*vi, graph); ei!=eiend; ++ei){
                if (!graph[*ei].active)
                    continue;

                // Get where this out_edge is pointing to
                other_v = target(*ei, graph);

                // Convert position to image coordinate
                position_in_world = (graph)[other_v].pos;
                convert_to_image_coordinate(position_in_world, homography_matrix_, &position_in_image2);

                line(image_, position_in_image, position_in_image2, color);
            }
        }
    }
/**
 * Detects straight field lines in the camera image, extracts and filters
 * their corner intersections, matches the corner layout against the field
 * model, and computes the resulting homography.
 *
 * @param src       input BGR camera image
 * @param imgDst    debug canvas; when non-empty (cols > 0) the detected
 *                  lines, corners and corner coordinates are drawn onto it
 * @param modelBots model (field-frame) bot positions, stored for matching
 * @param H         output homography produced by findFieldMatch
 */
void FieldLineDetector::findTransformation(cv::Mat& src, cv::Mat& imgDst,
		std::vector<cv::Point2f>& modelBots, cv::Mat& H)
{
	this->botPosField = modelBots;

	// Suppress noise before edge detection, then go grayscale for Canny.
	Mat imgBw;
	blur(src, imgBw, Size(5, 5));
	cvtColor(imgBw, imgBw, CV_BGR2GRAY);

	Mat imgEdges;
	Canny(imgBw, imgEdges, 50, 100, 3);

//	imshow("bw", imgBw);
//	imshow("edges", imgEdges);

	// Probabilistic Hough transform; threshold/length/gap are trackbar-tuned.
	std::vector<cv::Vec4i> lines;
	HoughLinesP(imgEdges, lines, 1, CV_PI / 180, min_threshold + p_trackbar,
			minLineLength, maxLineGap);

	// Extend each segment at both ends by scaleFactor/10 of its own length so
	// that segments which nearly touch actually intersect and their corner
	// can be detected.
	for (size_t i = 0; i < lines.size(); i++)
	{
		cv::Vec4i v = lines[i];
		cv::Point2f p1 = Point2f(v[0], v[1]);
		cv::Point2f p2 = Point2f(v[2], v[3]);
		cv::Point2f p1p2 = p2 - p1;

		cv::Point2f scaleP2 = p2 + p1p2 * (scaleFactor / 10.0f);
		cv::Point2f scaleP1 = p1 - p1p2 * (scaleFactor / 10.0f);

		lines[i][0] = scaleP1.x;
		lines[i][1] = scaleP1.y;
		lines[i][2] = scaleP2.x;
		lines[i][3] = scaleP2.y;
	}

	createThresholdedImg(src);

	// Corner pipeline: detect, filter, connect neighbors, map and match.
	detectCorners(lines);
	filterCorners();
	findNeighbors(lines);
	findCornerMapping(mappedEdges);
	for (size_t i = 0; i < mappedEdges.size(); i++)
	{
		cout << (*mappedEdges[i]) << endl;
	}
	findFieldMatch(mappedEdges, H);

	// Optional debug drawing.
	if (imgDst.cols > 0)
	{
		// Detected (extended) lines in green.
		for (size_t i = 0; i < lines.size(); i++)
		{
			cv::Vec4i v = lines[i];
			cv::line(imgDst, cv::Point(v[0], v[1]), cv::Point(v[2], v[3]),
					cv::Scalar(0, 255, 0), 2);
		}

		// All raw corner candidates as small blue dots.
		for (size_t i = 0; i < cornerBuffer.size(); i++)
		{
			cv::circle(imgDst, cornerBuffer.at(i), 1, cv::Scalar(255, 0, 0), 2);
		}

		// Filtered corners as larger yellow circles.
		for (size_t i = 0; i < detectedCorners.size(); i++)
		{
			circle(imgDst, detectedCorners[i]->point, 20,
					Scalar(0, 255, 255), 1);
		}

		// Print each filtered corner's coordinates next to it.
		for (size_t i = 0; i < detectedCorners.size(); i++)
		{
			stringstream ss;
			ss << detectedCorners[i]->point;
			putText(imgDst, ss.str(),
					detectedCorners[i]->point + Point2f(0, 10),
					FONT_HERSHEY_PLAIN, 1, Scalar(250, 0, 0));
		}
	}
}
// Hand detection/annotation pass over a single camera frame:
// 1. updates the background-subtraction model (bg/fore/back members),
// 2. finds large foreground contours and their convex hulls,
// 3. estimates the palm center/radius from convexity defects (smoothed over
//    the last few frames via the palm_centers member),
// 4. counts extended fingers from defect-triangle geometry and logs a
//    click/release decision (the mouse calls themselves are commented out).
// Returns the frame with all annotations drawn onto it.
Mat CameraInteraction::Testmm(Mat frame){




            vector<vector<Point> > contours;


            //Update the current background model and get the foreground
            // (the first `backgroundFrame` frames train the model with a
            // positive learning rate; afterwards the model is frozen).
            if(backgroundFrame>0)
            {bg.operator ()(frame,fore);backgroundFrame--;}
            else
            {bg.operator()(frame,fore,0);}

            //Get background image to display it
            bg.getBackgroundImage(back);


            //Enhance edges in the foreground by applying erosion and dilation
            erode(fore,fore,Mat());
            dilate(fore,fore,Mat());


            //Find the contours in the foreground
            findContours(fore,contours,CV_RETR_EXTERNAL,CV_CHAIN_APPROX_NONE);
            for(int i=0;i<contours.size();i++)
                //Ignore all small insignificant areas
                if(contourArea(contours[i])>=5000)
                {
                    //Draw contour
                    vector<vector<Point> > tcontours;
                    tcontours.push_back(contours[i]);
                    drawContours(frame,tcontours,-1,cv::Scalar(0,0,255),2);

                    //Detect Hull in current contour
                    // (point hull for drawing, index hull for convexityDefects)
                    vector<vector<Point> > hulls(1);
                    vector<vector<int> > hullsI(1);
                    convexHull(Mat(tcontours[0]),hulls[0],false);
                    convexHull(Mat(tcontours[0]),hullsI[0],false);
                    drawContours(frame,hulls,-1,cv::Scalar(0,255,0),2);

                    //Find minimum area rectangle to enclose hand
                    RotatedRect rect=minAreaRect(Mat(tcontours[0]));

                    //Find Convex Defects
                    vector<Vec4i> defects;
                    if(hullsI[0].size()>0)
                    {
                        // Draw the enclosing rotated rectangle in blue.
                        Point2f rect_points[4]; rect.points( rect_points );
                        for( int j = 0; j < 4; j++ )
                            line( frame, rect_points[j], rect_points[(j+1)%4], Scalar(255,0,0), 1, 8 );
                        Point rough_palm_center;
                        convexityDefects(tcontours[0], hullsI[0], defects);
                        if(defects.size()>=3)
                        {
                            // Collect all defect start/end/far points; their
                            // average is the rough palm center.
                            vector<Point> palm_points;
                            for(int j=0;j<defects.size();j++)
                            {
                                int startidx=defects[j][0]; Point ptStart( tcontours[0][startidx] );
                                int endidx=defects[j][1]; Point ptEnd( tcontours[0][endidx] );
                                int faridx=defects[j][2]; Point ptFar( tcontours[0][faridx] );
                                //Sum up all the hull and defect points to compute average
                                rough_palm_center+=ptFar+ptStart+ptEnd;
                                palm_points.push_back(ptFar);
                                palm_points.push_back(ptStart);
                                palm_points.push_back(ptEnd);
                            }

                            //Get palm center by 1st getting the average of all defect points, this is the rough palm center,
                            //Then U chose the closest 3 points ang get the circle radius and center formed from them which is the palm center.
                            rough_palm_center.x/=defects.size()*3;
                            rough_palm_center.y/=defects.size()*3;
                            Point closest_pt=palm_points[0];
                            // Sort palm points by squared distance to the rough center.
                            vector<pair<double,int> > distvec;
                            for(int i=0;i<palm_points.size();i++)
                                distvec.push_back(make_pair(dist(rough_palm_center,palm_points[i]),i));
                            sort(distvec.begin(),distvec.end());

                            //Keep choosing 3 points till you find a circle with a valid radius
                            //As there is a high chance that the closes points might be in a linear line or too close that it forms a very large circle
                            pair<Point,double> soln_circle;
                            for(int i=0;i+2<distvec.size();i++)
                            {
                                Point p1=palm_points[distvec[i+0].second];
                                Point p2=palm_points[distvec[i+1].second];
                                Point p3=palm_points[distvec[i+2].second];
                                soln_circle=circleFromPoints(p1,p2,p3);//Final palm center,radius
                                if(soln_circle.second!=0)
                                    break;
                            }

                            //Find avg palm centers for the last few frames to stabilize its centers, also find the avg radius
                            // (palm_centers is a sliding window of at most 10 entries).
                            palm_centers.push_back(soln_circle);
                            if(palm_centers.size()>10)
                                palm_centers.erase(palm_centers.begin());

                            Point palm_center;
                            double radius=0;
                            for(int i=0;i<palm_centers.size();i++)
                            {
                                palm_center+=palm_centers[i].first;
                                radius+=palm_centers[i].second;
                            }
                            palm_center.x/=palm_centers.size();
                            palm_center.y/=palm_centers.size();
                            radius/=palm_centers.size();

                            //Draw the palm center and the palm circle
                            //The size of the palm gives the depth of the hand
                            circle(frame,palm_center,5,Scalar(144,144,255),3);
                            circle(frame,palm_center,radius,Scalar(144,144,255),2);

                            //Detect fingers by finding points that form an almost isosceles triangle with certain thesholds
                            int no_of_fingers=0;
                            for(int j=0;j<defects.size();j++)
                            {
                                int startidx=defects[j][0]; Point ptStart( tcontours[0][startidx] );
                                int endidx=defects[j][1]; Point ptEnd( tcontours[0][endidx] );
                                int faridx=defects[j][2]; Point ptFar( tcontours[0][faridx] );
                                //X o--------------------------o Y
                                double Xdist=sqrt(dist(palm_center,ptFar));
                                double Ydist=sqrt(dist(palm_center,ptStart));
                                double length=sqrt(dist(ptFar,ptStart));

                                double retLength=sqrt(dist(ptEnd,ptFar));
                                //Play with these thresholds to improve performance
                                if(length<=3*radius&&Ydist>=0.4*radius&&length>=10&&retLength>=10&&max(length,retLength)/min(length,retLength)>=0.8)
                                    if(min(Xdist,Ydist)/max(Xdist,Ydist)<=0.8)
                                    {
                                        // Comma operator: draw the finger edge AND bump the count.
                                        if((Xdist>=0.1*radius&&Xdist<=1.3*radius&&Xdist<Ydist)||(Ydist>=0.1*radius&&Ydist<=1.3*radius&&Xdist>Ydist))
                                            line( frame, ptEnd, ptFar, Scalar(0,255,0), 1 ),no_of_fingers++;
                                    }


                            }

                            no_of_fingers=min(5,no_of_fingers);
                            qDebug()<<"NO OF FINGERS: "<<no_of_fingers;
                            //mouseTo(palm_center.x,palm_center.y);//Move the cursor corresponding to the palm
                            if(no_of_fingers<4)//If no of fingers is <4 , click , else release
//                                mouseClick();
                                qDebug()<<"Test";
                            else
//                                mouseRelease();
                                qDebug()<<"Hola";

                        }
                    }

                }
            // Remind the user while the background model is still training.
            if(backgroundFrame>0)
                putText(frame, "Recording Background", cvPoint(30,30), FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(200,200,250), 1, CV_AA);
//            imshow("Framekj",frame);
//            imshow("Background",back);
return frame;

}
Example #30
0
// Query the BoW database for past keyframes visually similar to `keyframe`
// and decide whether a loop closure candidate exists.
//
// keyframe    : current keyframe (supplies keypoints, image, BRIEF descriptors)
// frame_index : sequential index of `keyframe`; also its key in image_pool
// returns     : smallest (oldest) matching frame index, or -1 if no
//               sufficiently confident loop candidate was found
int PoseGraph::detectLoop(KeyFrame* keyframe, int frame_index)
{
    // put image into image_pool; for visualization
    cv::Mat compressed_image;
    if (DEBUG_IMAGE)
    {
        int feature_num = keyframe->keypoints.size();
        cv::resize(keyframe->image, compressed_image, cv::Size(376, 240));
        putText(compressed_image, "feature_num:" + to_string(feature_num), cv::Point2f(10, 10), CV_FONT_HERSHEY_SIMPLEX, 0.4, cv::Scalar(255));
        image_pool[frame_index] = compressed_image;
    }
    TicToc tmp_t;
    //first query; then add this frame into database!
    QueryResults ret;
    TicToc t_query;
    // Ask for the 4 best matches, excluding the 50 most recent frames so we
    // do not "close a loop" with our immediate temporal neighbours.
    db.query(keyframe->brief_descriptors, ret, 4, frame_index - 50);
    //printf("query time: %f", t_query.toc());
    //cout << "Searching for Image " << frame_index << ". " << ret << endl;

    TicToc t_add;
    db.add(keyframe->brief_descriptors);
    //printf("add feature time: %f", t_add.toc());
    // ret[0] is the nearest neighbour's score. threshold change with neighour score
    bool find_loop = false;
    cv::Mat loop_result;
    if (DEBUG_IMAGE)
    {
        loop_result = compressed_image.clone();
        if (ret.size() > 0)
            putText(loop_result, "neighbour score:" + to_string(ret[0].Score), cv::Point2f(10, 50), CV_FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(255));
    }
    // visual loop result 
    if (DEBUG_IMAGE)
    {
        for (unsigned int i = 0; i < ret.size(); i++)
        {
            int tmp_index = ret[i].Id;
            auto it = image_pool.find(tmp_index);
            // FIX: guard the lookup — dereferencing end() when a candidate's
            // image was never pooled is undefined behaviour.
            if (it == image_pool.end())
                continue;
            cv::Mat tmp_image = (it->second).clone();
            putText(tmp_image, "index:  " + to_string(tmp_index) + "loop score:" + to_string(ret[i].Score), cv::Point2f(10, 50), CV_FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(255));
            cv::hconcat(loop_result, tmp_image, loop_result);
        }
    }
    // a good match with its neighbour: the best hit must be confident
    // (score > 0.05) AND at least one further hit must also exceed 0.015.
    if (ret.size() >= 1 &&ret[0].Score > 0.05)
        for (unsigned int i = 1; i < ret.size(); i++)
        {
            //if (ret[i].Score > ret[0].Score * 0.3)
            if (ret[i].Score > 0.015)
            {          
                find_loop = true;
                int tmp_index = ret[i].Id;
                if (DEBUG_IMAGE && 0)
                {
                    auto it = image_pool.find(tmp_index);
                    // FIX: same end() guard as above before dereferencing.
                    if (it == image_pool.end())
                        continue;
                    cv::Mat tmp_image = (it->second).clone();
                    putText(tmp_image, "loop score:" + to_string(ret[i].Score), cv::Point2f(10, 50), CV_FONT_HERSHEY_SIMPLEX, 0.4, cv::Scalar(255));
                    cv::hconcat(loop_result, tmp_image, loop_result);
                }
            }

        }
/*
    if (DEBUG_IMAGE)
    {
        cv::imshow("loop_result", loop_result);
        cv::waitKey(20);
    }
*/
    if (find_loop && frame_index > 50)
    {
        // Prefer the oldest candidate. NOTE(review): the first hit seeds
        // min_index unconditionally (no score check on ret[0] here); later
        // hits replace it only when both older AND above the 0.015 floor —
        // preserved as-is since callers may rely on this behaviour.
        int min_index = -1;
        for (unsigned int i = 0; i < ret.size(); i++)
        {
            if (min_index == -1 || (ret[i].Id < min_index && ret[i].Score > 0.015))
                min_index = ret[i].Id;
        }
        return min_index;
    }
    else
        return -1;

}