// Scans the ROI of a 16-bit image for the smallest strictly positive value
// (after scaling the 13-bit range down to 8 bits) and reports its location.
bool findTop(cv::Point& top, int& topVal, cv::Mat* src, cv::Rect rect)
{
    cv::Point res(0, 0);
    int x = 0, y = 0;
    bool bFound = false;
    topVal = 65535;
    if (!src->empty()) {
        try {
            for (int i = rect.tl().y; i < rect.br().y; ++i) {
                for (int j = rect.tl().x; j < rect.br().x; ++j) {
                    Int16 curVarVec = Convert::ToInt16((src->at<Int16>(cv::Point(j, i))) * 255.0f / 8191.0f);
                    if (curVarVec < topVal && curVarVec > 0) {
                        topVal = curVarVec;
                        x = j;
                        y = i;
                        bFound = true;
                    }
                }
            }
        } catch (...) {
            // DO NOTHING
        }
    }
    //ht.nNose = src->at<Int16>(0,0);
    if (bFound)
        top = cv::Point(x, y);
    return bFound;
}
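// A possible OpenCV-only equivalent of findTop() (a sketch, not the original implementation),
// assuming *src is a single-channel CV_16S image in which only strictly positive pixels are
// valid: mask out non-positive values and let cv::minMaxLoc locate the minimum.
bool findTopCv(cv::Point& top, int& topVal, const cv::Mat& src, const cv::Rect& rect)
{
    cv::Mat roi = src(rect);
    cv::Mat validMask = roi > 0;                         // 255 where the value is usable
    if (cv::countNonZero(validMask) == 0)
        return false;
    double minVal = 0.0;
    cv::Point minLoc;
    cv::minMaxLoc(roi, &minVal, nullptr, &minLoc, nullptr, validMask);
    topVal = static_cast<int>(minVal * 255.0 / 8191.0);  // same 13-bit -> 8-bit scaling as above
    top = minLoc + rect.tl();                            // back to full-image coordinates
    return true;
}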
// Returns true if bounding box a strictly contains bounding box b.
bool contient(cv::Rect a, cv::Rect b)
{
    if ((a.tl().x < b.tl().x) && (a.tl().y < b.tl().y) &&
        (a.br().x > b.br().x) && (a.br().y > b.br().y))
        return true;
    return false;
}
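// Side note (a sketch, not from the original source): OpenCV's rectangle intersection gives an
// equivalent test for non-strict containment -- (a & b) == b holds exactly when b lies inside a,
// borders included, whereas contient() above requires strict containment.
bool containsNonStrict(const cv::Rect& a, const cv::Rect& b)
{
    return (a & b) == b;   // the intersection equals b iff b is fully inside a
}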
void BlobPeople::update(const cv::Rect& track)
{
    cur = toOf(track).getCenter();
    smooth.interpolate(cur, 0.5);
    height = track.br().y - track.tl().y;
    bottom = track.br().y;
    left = track.tl().x;
    right = track.br().x;
}
void TrackFace::drawFace(cv::Mat & frame, cv::Rect & face_n, string box_text)
{
    rectangle(frame, face_n, CV_RGB(0, 255, 0), 1);

    // Draw the label just above the face rectangle, clamped to the image border.
    int pos_x = std::max(face_n.tl().x - 10, 0);
    int pos_y = std::max(face_n.tl().y - 10, 0);

    putText(frame, box_text, Point(pos_x, pos_y), FONT_HERSHEY_PLAIN, 1.0, CV_RGB(0, 255, 0), 2.0);
}
void LevelTracker::setRegion(const cv::Rect &roi)
{
    topLeft = roi.tl();
    bottonRight = roi.br();
    if (topLeft.x < bottonRight.x && topLeft.y < bottonRight.y)
        regOk = true;
}
// Maps landmarks given relative to one face rectangle into the coordinate frame of another.
vector<cv::Point2d> MapShape(cv::Rect original_face_rect,
                             const vector<cv::Point2d> original_landmarks,
                             cv::Rect new_face_rect)
{
    vector<cv::Point2d> result;
    for (const cv::Point2d &landmark : original_landmarks) {
        result.push_back(landmark);
        result.back() -= static_cast<cv::Point2d>(original_face_rect.tl());
        result.back().x *= static_cast<double>(new_face_rect.width) / original_face_rect.width;
        result.back().y *= static_cast<double>(new_face_rect.height) / original_face_rect.height;
        result.back() += static_cast<cv::Point2d>(new_face_rect.tl());
    }
    return result;
}
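// Hypothetical usage sketch for MapShape() (the rectangle sizes are made up for illustration):
// remap landmarks detected inside an arbitrary face rectangle onto a fixed-size 128x128 crop.
void mapShapeExample(const std::vector<cv::Point2d>& detected_landmarks, const cv::Rect& detected_face)
{
    const cv::Rect normalized_face(0, 0, 128, 128);
    std::vector<cv::Point2d> normalized_landmarks =
        MapShape(detected_face, detected_landmarks, normalized_face);
    // normalized_landmarks now live in the 128x128 coordinate frame of normalized_face.
}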
bool GeoMapEditor::addMouseObject(  // try to add a new object by dragging or clicking the mouse
    cv::Rect& rect,  // note: in-out -- the rectangle is adjusted according to the creation rules for the given object type
    int flags)
{
    if (objType() == "AGM_Segm") {
        Point xyTL = rect.tl();
        Point xyBR = rect.br();
        GeoSheet& sh = gm.sheets[cur_sheet];
        Point2d enTL = sh.xy2en(xyTL);
        Point2d enBR = sh.xy2en(xyBR);
        AGM_Segm* ps = new AGM_Segm(enTL, enBR);
        gm.objects.push_back(cv::Ptr<AGM_Segm>(ps));
    } else {
        Point xy = center(rect);
        GeoSheet& sh = gm.sheets[cur_sheet];
        Point2d en = sh.xy2en(xy);
        AGM_Point* pp = new AGM_Point(en);
        gm.objects.push_back(cv::Ptr<AGM_Point>(pp));
    }
    return true;
};
static bool checkColision(const cv::Rect &a, const cv::Rect &b)
{
    // Returns true if any corner of a lies inside b.
    if (b.contains(a.tl())) return true;
    if (b.contains(a.br())) return true;
    if (b.contains(cv::Point(a.x + a.width, a.y))) return true;
    if (b.contains(cv::Point(a.x, a.y + a.height))) return true;
    return false;
}
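// Note (a sketch, not part of the original code): the corner test above misses the case where
// b lies strictly inside a, since then none of a's corners fall inside b. The usual OpenCV
// idiom tests the intersection directly:
static bool rectsOverlap(const cv::Rect& a, const cv::Rect& b)
{
    return (a & b).area() > 0;   // non-empty intersection <=> the rectangles overlap
}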
bool Target::init(KVConfig *cfg, int id, const cv::Rect &roi, const cv::Mat &curr_gray,
                  double stamp, double min_dis_5frames, int min_pts,
                  int max_feature_pts, double qualitylevel)
{
    first_rc_ = roi;
    outer_.x = 0, outer_.y = 0, outer_.width = curr_gray.cols, outer_.height = curr_gray.rows;
    stamp_ = stamp;
    cfg_ = cfg;
    id_ = id;
    min_dis_5frames_ = min_dis_5frames;
    min_pts_ = min_pts;
    stopped_dis_ = atof(cfg->get_value("pd_target_stopped_dis", "2.0"));

    //int max_features_pts = atoi(cfg->get_value("pd_max_features_pts", "300"));
    int max_features_pts = 200;

    PTS pts;
    //cv::goodFeaturesToTrack(curr_gray(roi), pts, max_feature_pts, qualitylevel, 1.5);
    hi_goodFeaturesToTrack(curr_gray(roi), pts, 200, qualitylevel, 1.5);
    if ((int)pts.size() < min_pts_) {
        return false;
    }

    // Feature points were detected in ROI-local coordinates; shift them into frame coordinates.
    l2g(pts, roi.tl());
    layers_.push_back(pts);

    brc_ = roi;
    last_rc_ = cv::boundingRect(pts);

    return true;
}
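// Hypothetical sketch of what l2g() presumably does in Target::init() above (its real
// implementation is not part of this snippet): shift feature points from ROI-local
// coordinates into full-frame coordinates by adding the ROI's top-left corner.
static void l2g_sketch(std::vector<cv::Point2f>& pts, const cv::Point& tlOffset)
{
    for (cv::Point2f& p : pts)
        p += cv::Point2f(static_cast<float>(tlOffset.x), static_cast<float>(tlOffset.y));
}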
Face::Face(cv::Rect &r)
{
    cv::Point tl = r.tl();
    m_center = cv::Point(tl.x + r.width / 2, tl.y + r.height / 2);
    m_width = r.width;
    m_height = r.height;
    m_weight = 1;
}
inline static cv::Point center(const cv::Rect &rc)
{
    std::vector<cv::Point> pts;
    pts.push_back(rc.tl());
    pts.push_back(rc.br());
    return center(pts);
}
bool ARC_Pair::set_reflection(cv::Mat img, cv::Rect r, cv::Size s)
{
    roi.reflection = convert_to_point(r, img, s);
    if (roi.reflection == cv::Point(-1, -1))
        roi.reflection = r.tl() - 0.5 * cv::Point(s);
    return true;
}        /* -----  end of method ARC_Pair::set_reflection  ----- */
// A little subroutine to draw a box onto an image
//
void draw_box(cv::Mat& img, cv::Rect box)
{
    cv::rectangle(
        img,
        box.tl(),
        box.br(),
        cv::Scalar(0x00, 0x00, 0xff)   /* red */
    );
}
/** determine if a rectangle contains "blank" pixels **/
bool Cropper::rectHasBlankPixels(const cv::Rect &roi)
{
    Point2i tr(roi.x + roi.width - 1, roi.y);
    Point2i bl(roi.x, roi.y + roi.height - 1);
    Point2i br(roi.x + roi.width - 1, roi.y + roi.height - 1);

    // Check the four edges of the rectangle.
    if (lineHasBlankPixels(roi.tl(), tr)) return true;
    if (lineHasBlankPixels(tr, br)) return true;
    if (lineHasBlankPixels(bl, br)) return true;
    if (lineHasBlankPixels(roi.tl(), bl)) return true;

    return false;
}
void Rectangle::set(const cv::Rect &_rect)
{
    rect = _rect;
    tl = _rect.tl();
    br = _rect.br();
    left->set(tl, cv::Point(tl.x, br.y));
    top->set(tl, cv::Point(br.x, tl.y));
    bottom->set(cv::Point(tl.x, br.y), br);
    right->set(cv::Point(br.x, tl.y), br);
    info.set(_rect);
}
/**
 * @brief Scales and translates a facebox. Useful for converting
 * between face boxes from different face detectors.
 *
 * To convert from V&J faceboxes to ibug faceboxes, use a scaling
 * of 0.85 and a translation_y of 0.2.
 * Ideally, we would learn the exact parameters from data.
 *
 * @param[in] facebox Input facebox.
 * @param[in] scaling The input facebox will be scaled by this factor.
 * @param[in] translation_y How much, in percent of the original facebox's width, the facebox will be translated in y direction. A positive value means facebox moves downwards.
 * @return The rescaled facebox.
 */
cv::Rect rescale_facebox(cv::Rect facebox, float scaling, float translation_y)
{
    // Assumes a square input facebox to work? (width == height)
    const auto new_width = facebox.width * scaling;
    const auto smaller_in_px = facebox.width - new_width;
    const auto new_tl = facebox.tl() + cv::Point2i(smaller_in_px / 2.0f, smaller_in_px / 2.0f);
    const auto new_br = facebox.br() - cv::Point2i(smaller_in_px / 2.0f, smaller_in_px / 2.0f);
    cv::Rect rescaled_facebox(new_tl, new_br);
    rescaled_facebox.y += facebox.width * translation_y;
    return rescaled_facebox;
};
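// Usage sketch (illustrative only, vj_box is a hypothetical detector output): convert a
// Viola&Jones detection into an ibug-style facebox with the parameters suggested in the
// doc comment above.
cv::Rect to_ibug_box(const cv::Rect& vj_box)
{
    return rescale_facebox(vj_box, 0.85f, 0.2f);
}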
Cluster::Cluster(int count, const cv::Rect &boundsInWhole, std::vector<cv::Point> deepest, int smoothing)
    : boundsInWhole(boundsInWhole),
      offset(boundsInWhole.tl()),
      count(count),
      deepest(deepest),
      smoothing(smoothing)
{
}
/** expand a roi **/
void Cropper::expandROI(cv::Rect &r)
{
    bool canExpand = true;
    Point2i p1, p2;
    cv::Rect rp;

    while (canExpand) {
        canExpand = false;

        // Try to push the top-left corner outwards towards rect.tl(),
        // halving the step until the expanded rectangle has no blank pixels.
        Point2i stp = (rect.tl() - r.tl());
        stp.x /= 2;
        stp.y /= 2;
        p2 = r.br();
        while (abs(stp.x) + abs(stp.y) >= 1) {
            p1 = r.tl() + stp;
            rp = cv::Rect(p1, p2);
            if (!rectHasBlankPixels(rp)) {
                r = rp;
                canExpand = true;
                break;
            }
            stp.x /= 2;
            stp.y /= 2;
        }

        // Do the same for the bottom-right corner towards rect.br().
        stp = (rect.br() - r.br());
        stp.x /= 2;
        stp.y /= 2;
        p1 = r.tl();
        while (abs(stp.x) + abs(stp.y) >= 1) {
            p2 = r.br() + stp;
            rp = cv::Rect(p1, p2);
            if (!rectHasBlankPixels(rp)) {
                r = rp;
                canExpand = true;
                break;
            }
            stp.x /= 2;
            stp.y /= 2;
        }
    }
}
std::pair<double, cv::Point> DescribeMaximum(const cv::Mat &surface, const cv::Rect &boundingBox)
{
    // assumes surface is a single-channel matrix of doubles (CV_64F)
    const cv::Point start = boundingBox.tl();
    const cv::Point end = boundingBox.br();   // exclusive corner of a cv::Rect

    cv::Point position = start;
    double value = surface.at<double>(position);

    for (int x = start.x; x < end.x; x++) {
        for (int y = start.y; y < end.y; y++) {
            if (surface.at<double>(y, x) > value) {
                value = surface.at<double>(y, x);
                position = cv::Point(x, y);
            }
        }
    }

    return std::make_pair(value, position);
}
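// Equivalent sketch using OpenCV directly (an assumption about intent, not the original code):
// cv::minMaxLoc over the ROI finds the same maximum; its location is reported relative to the
// ROI, so the bounding box offset has to be added back.
std::pair<double, cv::Point> DescribeMaximumCv(const cv::Mat& surface, const cv::Rect& boundingBox)
{
    double maxVal = 0.0;
    cv::Point maxLoc;
    cv::minMaxLoc(surface(boundingBox), nullptr, &maxVal, nullptr, &maxLoc);
    return std::make_pair(maxVal, maxLoc + boundingBox.tl());
}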
// ===  FUNCTION  ======================================================================
//         Name:  get_masked_frame
//  Description:  Masks the frame based on slope and roi. Mask returned by pointer.
// =====================================================================================
cv::Mat get_masked_frame(cv::Rect roi, double slope, cv::Mat* frame, cv::Mat* mask)
{
    cv::Point corners[1][4];
    // Set the frame
    *mask = cv::Mat::zeros(frame->size(), CV_8UC1);
    cv::Mat masked_frame;

    if (slope == 0) {
        // TODO: Could use direction handling here.
        corners[0][0] = roi.br();
        corners[0][1] = cv::Point(frame->cols, roi.y + roi.height);
        corners[0][2] = corners[0][1] - cv::Point(0, roi.height);
        corners[0][3] = corners[0][0] - cv::Point(0, roi.height);
    } else if (isinf(slope)) {
        {
            corners[0][0] = cv::Point(roi.x, frame->rows);
            corners[0][1] = cv::Point(roi.x, roi.y + roi.height);
        }
        {
            // note: this second block overwrites the two assignments above
            corners[0][0] = roi.tl();
            corners[0][1] = cv::Point(roi.x, 0);
        }
        corners[0][2] = corners[0][1] + cv::Point(roi.width, 0);
        corners[0][3] = corners[0][0] + cv::Point(roi.width, 0);
    } else {
        corners[0][0].x = (int)((frame->rows + slope * roi.x - roi.y) / slope);
        corners[0][0].y = frame->rows;
        corners[0][1] = cv::Point((int)((-roi.y + slope * roi.x) / slope), 0);
        corners[0][2] = corners[0][1] + cv::Point(roi.width, 0);
        corners[0][3] = corners[0][0] + cv::Point(roi.width, 0);
    }

    // This is weird, but follows OpenCV docs.
    const cv::Point* ppt[1] = { corners[0] };
    const int npt[] = { 4 };

    fillPoly(*mask, ppt, npt, 1, 255);
    frame->copyTo(masked_frame, *mask);
    return masked_frame;
}        // -----  end of function get_masked_frame  -----
Rectangle::Rectangle(const cv::Rect &_rect, const int &_id) : rect(_rect)
{
    tl = _rect.tl();
    br = _rect.br();

    left = new VerticalLine();
    left->set(tl, cv::Point(tl.x, br.y));

    top = new HorizontalLine();
    top->set(tl, cv::Point(br.x, tl.y));

    bottom = new HorizontalLine();
    bottom->set(cv::Point(tl.x, br.y), br);

    right = new VerticalLine();
    right->set(cv::Point(br.x, tl.y), br);

    info.set(_id, _rect);

    selected = false;
    selected_color = RED_COLOR;
    offset = 4;
    lineOffset = LINE_WEIGHT;
    color = cv::Scalar(rng.uniform(0, 255), rng.uniform(0, 255), rng.uniform(0, 255));
}
void singleeyefitter::cvx::getROI(const cv::Mat& src, cv::Mat& dst, const cv::Rect& roi, int borderType)
{
    cv::Rect bbSrc = boundingBox(src);
    cv::Rect validROI = roi & bbSrc;

    if (validROI == roi) {
        dst = cv::Mat(src, validROI);
    } else {
        // Figure out how much to add on for top, left, right and bottom
        cv::Point tl = roi.tl() - bbSrc.tl();
        cv::Point br = roi.br() - bbSrc.br();
        int top = std::max(-tl.y, 0);    // Top and left are negated because adding a border
        int left = std::max(-tl.x, 0);   // goes "the wrong way"
        int right = std::max(br.x, 0);
        int bottom = std::max(br.y, 0);
        cv::Mat tmp(src, validROI);
        cv::copyMakeBorder(tmp, dst, top, bottom, left, right, borderType);
    }
}
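// Usage sketch (illustrative, the image/patch names are placeholders): extract a 100x100 patch
// whose rectangle may extend past the image border; getROI() pads the out-of-bounds part with
// the chosen border mode instead of failing the Mat(src, roi) bounds check.
void getRoiExample(const cv::Mat& image, cv::Mat& patch)
{
    cv::Rect roi(-20, -20, 100, 100);   // partially outside the image
    singleeyefitter::cvx::getROI(image, patch, roi, cv::BORDER_REPLICATE);
}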
bool respond(const Bottle & command, Bottle & reply)
{
    // Get command string
    string receivedCmd = command.get(0).asString().c_str();
    bool f;
    int responseCode;   // Will contain Vocab-encoded response
    reply.clear();      // Clear reply bottle

    if (receivedCmd == "getPointClick") {
        if (!getPointClick()) {
            // Encode response
            responseCode = Vocab::encode("nack");
            reply.addVocab(responseCode);
        } else {
            // Encode response
            responseCode = Vocab::encode("ack");
            reply.addVocab(responseCode);
            Bottle& bCoords = reply.addList();
            bCoords.addInt(coords2D(0));
            bCoords.addInt(coords2D(1));
            //bCoords.addDouble(coords3D(2));
        }
        return true;

    } else if (receivedCmd == "getPointTrack") {
        tableHeight = command.get(1).asDouble();
        if (!getPointTrack(tableHeight)) {
            // Encode response
            responseCode = Vocab::encode("nack");
            reply.addVocab(responseCode);
        } else {
            // Encode response
            printf("Sending out 3D point!!\n");
            responseCode = Vocab::encode("ack");
            reply.addVocab(responseCode);
            Bottle& bCoords = reply.addList();
            bCoords.addDouble(coords3D(0));
            bCoords.addDouble(coords3D(1));
            bCoords.addDouble(coords3D(2));
            printf("Coords Bottle %s \n", bCoords.toString().c_str());
            printf("sent reply %s \n", reply.toString().c_str());
        }
        return true;

    } else if (receivedCmd == "getBox") {
        if (!getBox()) {
            // Encode response
            responseCode = Vocab::encode("nack");
            reply.addVocab(responseCode);
        } else {
            // Encode response
            responseCode = Vocab::encode("ack");
            reply.addVocab(responseCode);
            Bottle& bBox = reply.addList();
            bBox.addInt(BBox.tl().x);
            bBox.addInt(BBox.tl().y);
            bBox.addInt(BBox.br().x);
            bBox.addInt(BBox.br().y);
        }

    } else if (receivedCmd == "help") {
        reply.addVocab(Vocab::encode("many"));
        reply.addString("Available commands are:");
        reply.addString("help");
        reply.addString("quit");
        reply.addString("getPointClick - reads a click and returns the 2D coordinates");
        reply.addString("getPointTrack - Retrieves 2D coords and returns the 3D coordinates of the object tracked based on the table Height");
        reply.addString("getBox - Crops the image based on user input and creates a template for the tracker with it");
        return true;

    } else if (receivedCmd == "quit") {
        reply.addString("Quitting.");
        return false;
    } else {
        reply.addString("Invalid command, type [help] for a list of accepted commands.");
    }
    return true;
}
//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
//
// Custom rectangle-drawing function
//
//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
void DrawRectangle(Mat& img, cv::Rect box)
{
    // Random color
    rectangle(img, box.tl(), box.br(),
              Scalar(g_rng.uniform(0, 255), g_rng.uniform(0, 255), g_rng.uniform(0, 255)));
}
   Inpaint is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with Inpaint. If not, see <http://www.gnu.org/licenses/>.
*/

#include "catch.hpp"
#include "random_testdata.h"

#include <inpaint/template_match_candidates.h>
#include <opencv2/opencv.hpp>

using namespace Inpaint;

TEST_CASE("template-match-candidates")
{
    cv::Mat img = randomLinesImage(200, 40);
    cv::cvtColor(img, img, cv::COLOR_GRAY2BGR);

    cv::Rect r;
    cv::Mat block = randomBlock(img, r);

    cv::Mat candidates;
    findTemplateMatchCandidates(img, block, cv::Mat(), candidates, cv::Size(4, 4), 0);

    REQUIRE(candidates.at<uchar>(r.tl()) != 0);
    REQUIRE(cv::countNonZero(candidates) < 20);
}
void GroundTruthEvaluation::evaluateLocalizer(const int currentFrameNumber, taglist_t const& taglist)
{
    GroundTruth::LocalizerEvaluationResults& results = _localizerResults;

    // iterate over ground truth data (typed Grid3D), convert them to PipelineGrids
    // and store them in the std::set taggedGridsOnFrame
    for (GroundTruthGridSPtr const& grid : _groundTruthData.at(currentFrameNumber)) {
        if (grid) {
            results.taggedGridsOnFrame.insert(grid);
        }
    }

    // Detect False Negatives
    for (const GroundTruthGridSPtr& grid : results.taggedGridsOnFrame) {
        // ROI of ground truth
        const cv::Rect gridBox = grid->getBoundingBox();

        bool inGroundTruth = false;
        for (const pipeline::Tag& tag : taglist) {
            // ROI of pipeline tag
            const cv::Rect& tagBox = tag.getRoi();

            // target property is complete containment
            if (tagBox.contains(gridBox.tl()) && tagBox.contains(gridBox.br())) {
                inGroundTruth = true;
                break;
            }
        }

        // mark current ground truth box as missing detection (false negative)
        if (!inGroundTruth)
            results.falseNegatives.insert(grid);
    }

    // Detect False Positives
    // ground truth grids
    std::set<std::shared_ptr<PipelineGrid>> notYetDetectedGrids = results.taggedGridsOnFrame;

    // iterate over all pipeline tags
    for (const pipeline::Tag& tag : taglist) {
        // ROI of pipeline tag
        const cv::Rect& tagBox = tag.getRoi();
        bool inGroundTruth = false;

        // iterate over all ground truth grids
        for (const std::shared_ptr<PipelineGrid>& grid : notYetDetectedGrids) {
            // ROI of ground truth grid
            const cv::Rect gridBox = grid->getBoundingBox();

            if (tagBox.contains(gridBox.tl()) && tagBox.contains(gridBox.br())) {
                // match!
                inGroundTruth = true;

                // store pipeline tag in extra set
                results.truePositives.insert(tag);

                // store mapping from roi -> grid
                results.gridByTag[tag] = grid;

                // this modifies the container in a range based for loop, which may invalidate
                // the iterators. This is not a problem in this specific case because we exit the
                // loop right away. (beware if that is going to be changed)
                notYetDetectedGrids.erase(grid);
                break;
            }
        }

        // if no match was found, the pipeline-reported roi is a false detection
        if (!inGroundTruth)
            results.falsePositives.insert(tag);
    }
}
//-----------------------------------[DrawRectangle() function]------------------------------
// Description: custom rectangle-drawing function
//-----------------------------------------------------------------------------------------------
void DrawRectangle(cv::Mat& img, cv::Rect box)
{
    // random color
    cv::rectangle(img, box.tl(), box.br(),
                  cv::Scalar(g_rng.uniform(0, 255), g_rng.uniform(0, 255), g_rng.uniform(0, 255)));
}
int main(int argc, char** argv)
{
    // Initialization
    traxmode = false;
    silent = false;
    interactive = false;
    opterr = 0;
    initializeFile = NULL;
    configFile = NULL;
    configString = NULL;
    outputFile = NULL;
#ifdef BUILD_INTROSPECTION
    introspectionFile = NULL;
#endif
    int c = 0;
    seed = time(NULL);
    initialized = false;
    initialize = false;
    run = true;

    while ((c = getopt(argc, argv, CMD_OPTIONS)) != -1)
        switch (c) {
        case 'h': print_help(); exit(0);
        case 's': silent = true; break;
        case 'g': enable_gui(); break;
        case 'i': interactive = true; break;
#ifdef BUILD_DEBUG
        case 'd': __debug_enable(); break;
#endif
        case 'C': configFile = optarg; break;
        case 'c': configString = optarg; break;
        case 'I': initializeFile = optarg; break;
        case 'o': outputFile = optarg; break;
#ifdef BUILD_INTROSPECTION
        case 'D': introspectionFile = optarg; break;
#endif
        case 'S': seed = atoi(optarg); break;
#ifdef BUILD_TRAX
        case 't': traxmode = true; break;
#endif
        default:
            print_help();
            fprintf(stderr, "Unknown switch '-%c'\n", optopt);
            exit(-1);
        }

    RANDOM_SEED(seed);
    DEBUGMSG("Random seed: %d \n", seed);

    sequence = NULL;

    if (!traxmode) {
        if (optind < argc) {
            sequence = open_sequence(argv[optind]);
            if (!sequence) {
                fprintf(stderr, "Illegal source\n");
                exit(-1);
            }

            VideoFileSequence* videostream = dynamic_cast<VideoFileSequence*>(sequence);
            ImageDirectorySequence* dirstream = dynamic_cast<ImageDirectorySequence*>(sequence);
            FileListSequence* liststream = dynamic_cast<FileListSequence*>(sequence);

            if (videostream != NULL) {
                const char* filename = videostream->get_file();
                initialize = true;
                if (!initializeFile && filename) {
                    initializeFile = new char[strlen(filename) + 5];
                    strcpy(initializeFile, filename);
                    strcpy(initializeFile + strlen(filename), ".txt");
                }
            } else if (dirstream != NULL) {
                const char* filename = dirstream->get_directory();
                initialize = true;
                if (!initializeFile && filename) {
                    initializeFile = new char[strlen(filename) + 5];
                    strcpy(initializeFile, filename);
                    strcpy(initializeFile + strlen(filename) + (matches_suffix(filename, FILE_DELIMITER_STR) ? -1 : 0), ".txt");
                }
            } else if (liststream != NULL && initializeFile) {
                initialize = true;
            } else if (dynamic_cast<CameraSequence*>(sequence) != NULL) {
                interactive = false;
            }
        } else {
            fprintf(stderr, "No source given\n");
            exit(-1);
        }
    }

    // Tracking
    tracking_window = is_gui_enabled() ? ImageWindow::create(WINDOW_NAME, Size(10, 10), false) : NULL;

    if (configFile) {
        if (file_type(configFile) == FILETYPE_FILE) {
            DEBUGMSG("Reading config from '%s' \n", configFile);
            config.load_config(string(configFile));
        } else {
            DEBUGMSG("Reading built-in configuration '%s' \n", configFile);
            read_builtin_config(configFile, config);
        }
    }

    if (configString) {
        for (int i = 0; i < strlen(configString); i++) {
            if (configString[i] == ';') configString[i] = '\n';
        }
        istringstream moreConfig(configString);
        moreConfig >> config;
    }

    DEBUGGING {
        vector<string> configKeys = config.keys();
        for (int i = 0; i < configKeys.size(); i++) {
            string value = config.read<string>(configKeys[i]);
            DEBUGMSG("Config dump: %s = %s\n", configKeys[i].c_str(), value.c_str());
        }
    }

    initialize_canvases(config);

    if (config.keyExists("window.main.output")) {
        tracking_window->set_output(config.read<string>("window.main.output"),
                                    config.read<int>("window.main.output.slowdown", 1));
    }

#ifdef BUILD_TRAX
    trax_properties* trax_out_properties = NULL;
    if (traxmode) {
#ifdef TRAX_DEBUG
        trax_setup("trax.log", TRAX_LOG_INPUT | TRAX_LOG_OUTPUT | TRAX_LOG_DEBUG);
#else
        trax_setup(NULL, 0);
#endif
        trax_out_properties = trax_properties_create();
    }
#endif

    Ptr<Tracker> tracker;
    Image frame;
    int frameNumber = 1;

    ImageWindow::grid(2, 3, 400, 400);

    std::ofstream rectangle_output;
    if (outputFile)
        rectangle_output.open(outputFile, std::ifstream::out);

    for (; run;) {
        long start_time = clock();
        char key = -1;

        ///////////////////// GET IMAGE ////////////////////

        if (!traxmode) {
            frame.capture(sequence);
        }
#ifdef BUILD_TRAX
        else {
            trax_imagepath path;
            trax_rectangle rect;
            trax_properties* prop = trax_properties_create();
            int tr = trax_wait(path, prop, &rect);
            if (tr == TRAX_INIT) {
                start.x = rect.x;
                start.y = rect.y;
                start.width = rect.width;
                start.height = rect.height;
                trax_properties_enumerate(prop, trax_properties_to_config, &config);
                initialize = true;
                initialized = false;
            } else if (tr == TRAX_FRAME) {
                if (!initialized) {
                    trax_properties_release(&prop);
                    break;
                }
            } else {
                trax_properties_release(&prop);
                break;
            }
            trax_properties_release(&prop);
            frame.load(string(path));
            trax_properties_clear(trax_out_properties);
        }
#endif

        if (frame.empty()) {
            DEBUGMSG("No new frame available, quitting.\n");
            break;
        }

        if (frameNumber == 1 && !traxmode) {
            if (is_gui_enabled())
                tracking_window->resize(Size(frame.width(), frame.height()));
            int size = MAX(frame.width() / 8, frame.height() / 8);
            start.width = size;
            start.height = size;
            start.x = (frame.width() - start.width) / 2;
            start.y = (frame.height() - start.height) / 2;
        }

        ///////////////////// PROCESSING ////////////////////

        if (!initialized && initialize) {
            DEBUGMSG("Initializing using rectangle from '%s'\n", initializeFile);
            if (initializeFile) {
                DEBUGMSG("Initializing using rectangle from '%s'\n", initializeFile);
                start = read_rectangle(initializeFile);
                if (is_gui_enabled() && (start.width < 1 || start.height < 1)) {
                    Mat rgb = frame.get_rgb();
                    tracking_window->draw(rgb);
                    tracking_window->push();
                    if (tracking_window->queryrect(start)) {
                        write_rectangle(initializeFile, start);
                    }
                }
            }

            if (tracker.empty()) {
                if (!config.keyExists("tracker"))
                    throw LegitException("Unknown tracker type");
                tracker = create_tracker(config.read<string>("tracker"), config, "default");
                if (!silent) {
                    performance_observer = new PerformanceObserver(tracker->get_stages());
                    tracker->add_observer(performance_observer);
                }
#ifdef BUILD_TRAX
                if (traxmode) {
                    trax_observer = new TraxObserver();
                    tracker->add_observer(trax_observer);
                    trax_properties_set_int(trax_out_properties, "seed", seed);
                }
#endif
#ifdef BUILD_INTROSPECTION
                if (introspectionFile) {
                    introspection_observer = new IntrospectionObserver(frame.width(), frame.height(), seed);
                    ((IntrospectionObserver *)&(*introspection_observer))->configuration(config);
                    tracker->add_observer(introspection_observer);
                }
#endif
            }

            tracker->initialize(frame, start);
            initialized = true;
        } else if (!tracker.empty()) {
            frameNumber++;
            if (!traxmode && sequence->is_finite() && sequence->position() >= sequence->size()) {
                DEBUGMSG("End frame reached, quitting.\n");
                run = false;
            }
            long timer = clock();
            tracker->update(frame);
            if (!silent && !traxmode)
                printf("Frame %d - elapsed time: %d ms\n", frameNumber,
                       (int)(((clock() - timer) * 1000) / CLOCKS_PER_SEC));
        }

        ///////////////////// OUTPUT RESULTS ////////////////////

#ifdef BUILD_TRAX
        if (traxmode) {
            trax_rectangle region;
            if (tracker->is_tracking()) {
                Rect rect = tracker->region();
                region.x = rect.x;
                region.y = rect.y;
                region.width = rect.width;
                region.height = rect.height;
            } else {
                region.x = 0;
                region.y = 0;
                region.width = 0;
                region.height = 0;
            }
            trax_report_position(region, trax_out_properties); // properties);
        }
#endif

        if (is_gui_enabled() && tracking_window) {
            Mat rgb = frame.get_rgb();
            tracking_window->draw(rgb);
            if (initialized) {
                tracker->visualize(*tracking_window);
                cv::Rect region = tracker->region();
                tracking_window->rectangle(region, COLOR_YELLOW, 2);
            } else {
                tracking_window->rectangle(start.tl(), start.br(), Scalar(255, 255, 255), 1);
            }
            tracking_window->push();
        }

        if (rectangle_output.is_open()) {
            cv::Rect rectangle = initialized ? tracker->region() : start;
            rectangle_output << rectangle.x << "," << rectangle.y << ","
                             << rectangle.width << "," << rectangle.height << std::endl;
        }

        ///////////////////// HANDLE INPUT ////////////////////

        if (!traxmode) {
            if (is_gui_enabled()) {
                if (interactive) {
                    switch (pause()) {
                    case 27: run = false; break;
                    case 'p': interactive = !interactive; break;
                    }
                } else {
                    long wait = throttle ? MAX(1, (limitFrameTime - (clock() - start_time))) : 1;
                    key = waitKey(wait);
                    switch (key) {
                    case 27: run = false; break;
                    case ' ': initialize = true; break;
                    }
                }
            } else {
                if (throttle) {
                    long wait = MAX(1, (limitFrameTime - (clock() - start_time)));
                    sleep(wait);
                }
            }
            if (!tracker.empty() && !tracker->is_tracking()) {
                DEBUGMSG("Target lost\n");
                run = false;
            }
        }
#ifdef BUILD_TRAX
        else {
            if (is_gui_enabled()) waitKey(1);
        }
#endif
    }

    if (!performance_observer.empty()) {
        ((PerformanceObserver *)&(*performance_observer))->print();
    }

    // Cleanup

#ifdef BUILD_INTROSPECTION
    if (!introspection_observer.empty()) {
        DEBUGMSG("Storing introspection data to %s\n", introspectionFile);
        ((IntrospectionObserver *)&(*introspection_observer))->store(introspectionFile, true);
    }
#endif

#ifdef BUILD_TRAX
    trax_properties_release(&trax_out_properties);
    if (traxmode) trax_cleanup();
#endif

    if (rectangle_output.is_open()) {
        rectangle_output.close();
    }

    if (sequence) delete sequence;
}