Code Example #1
void WayFinderApp::setup()
{
    println("WayFinderApp started.");

    // Load destinations from config file.
    destinations = Destination::getDestinations();
    if(destinations.size() == 0) {
        println("No destinations found, check the config file.");
        exit(EXIT_FAILURE);
    }
    println("Destinations loaded.");

    // Initialize state.
    spotlightRadius = (float)getWindowWidth() / 16.0f;
    arrowLength = (float)min(getWindowWidth(), getWindowHeight()) / 2.0f;
    spotlightCenter2D = Vec2f((float)getWindowWidth() / 2.0f, (float)getWindowHeight() / 2.0f);
    spotlightCenter3D = Vec3f((float)getWindowWidth() / 2.0f, (float)getWindowHeight() / 2.0f, 0.0f);
    detected = false;

    //capture = Capture::create(WayFinderApp::WIDTH, WayFinderApp::HEIGHT);
    capture = Capture::create(getWindowWidth(), getWindowHeight());
    capture->start();

    //bg.set("bShadowDetection", false);
    bg.set("nmixtures", 3);
    bg.setBool("detectShadows", true);

    debugView = false;
}
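The `bg` object is declared elsewhere in this project; judging from the `nmixtures` and `detectShadows` parameters it is most likely an OpenCV MOG2 background subtractor configured through the legacy `cv::Algorithm::set`/`setBool` string interface. A minimal sketch of the equivalent setup with the OpenCV 3+ API follows (the function name and the history/threshold values are assumptions, not taken from the project):

#include <opencv2/video/background_segm.hpp>

// Sketch only: assumes bg corresponds to an OpenCV MOG2 background subtractor.
cv::Ptr<cv::BackgroundSubtractorMOG2> makeBackgroundModel()
{
    // createBackgroundSubtractorMOG2(history, varThreshold, detectShadows)
    cv::Ptr<cv::BackgroundSubtractorMOG2> bg =
        cv::createBackgroundSubtractorMOG2(500, 16.0, /*detectShadows=*/true);
    bg->setNMixtures(3); // matches bg.set("nmixtures", 3) above
    return bg;
}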
Code Example #2
File: main.cpp  Project: Charence/BiomotionVision
int main(int argc, const char** argv) {
	int persons = 1;
	const bool recalibrate = true;

	// read args
	if (argc > 1)
		persons = atoi(argv[1]);

	// setup path to file
	const char* filepath = "/home/charence/Workspace/biomotion-vision/images/set2/%d/10/frame%04d.jpg";
	// is it my mac?
	#ifdef MYMAC
	filepath = "/Users/charence/Workspace/biomotion-vision/images/set2/%d/10/frame%04d.jpg";
	#endif
	// is it on doc?
	#ifdef ONDOC
	filepath = "/media/Charence500/Data/20121221/10/%d/frame%04d.jpg";
	#endif

	int start = 0;
	int end = 2485;
	switch(persons) {
		case 1: end = 2485; break;
		case 2: end = 3621; break;
		case 3: end = 4489; break;
	}

	// setup background model
	bgmodel.set("history", history);
	bgmodel.set("varThreshold", varThreshold);
	bgmodel.set("detectShadows", true);

	// setup homography
	if (recalibrate) {
	}
	else {
	}

	// setup tracker
	pointTracker.setArguments(3.5, 50);

	//cout << "ImageNum,ContourArea,RectCentreX,RectCentreY,RectAngle,RectWidth,RectHeight,CircCentreX,CircCentreY,Radius" << endl;
	//cout << "ImageNum,ContourArea,CircCentreX,CircCentreY,Radius" << endl;

	// process sequence
	for (int i = start; i <= end; i++) {
		if (i > 200)
			i = 999; //1050;
		char filename [128];
		sprintf(filename, filepath, persons, i);
		//cout << "In: " << filename << endl;
		imageNum = i;
		// load image
		cv::Mat image = cv::imread(filename);
		if (image.empty()) {
			throw runtime_error("Could not load image");
		}
		// detect objects
		learningRate = (i > 200) ? 0.00005 : 0.01;
		//delay = (i > 900) ? 500 : 5; // slows down update so I can get screencaps
		//vector<ObjectInfo> detectedObjects = detectPeople(image);
		vector<cv::Mat> detectedObjects = detectPeople(image);
		// homography
		//detectedObjects = ho
		// track objects
		//objectTracker->update(detectedObjects);
		if (i > 200) {
			trackObjects(detectedObjects);
			predictObjects(image);
		}
		// translate coordinates
	}

	cout << "Total frames: " << numFrames << endl;
	cout << "Merged frames: " << numMerged << endl;
	cout << "Split frames: " << numSplit << endl;
	cout << "Unsplit frames: " << numUnsplit << endl;

	return 0;
}
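`detectPeople`, `bgmodel`, `learningRate`, and the tracking helpers are defined elsewhere in this project, so only their usage is visible here. As a rough sketch (not the project's actual implementation), a `detectPeople`-style function built on the OpenCV 3+ `BackgroundSubtractor` API could look like the following, where the area threshold and shadow handling are assumptions:

#include <opencv2/opencv.hpp>
#include <vector>

// Sketch only: a guess at the general shape of detectPeople(), not the real code.
std::vector<cv::Mat> detectPeopleSketch(const cv::Mat& image,
                                        cv::Ptr<cv::BackgroundSubtractor> bgmodel,
                                        double learningRate)
{
    cv::Mat fgMask;
    bgmodel->apply(image, fgMask, learningRate);                 // update model, get foreground mask
    cv::threshold(fgMask, fgMask, 200, 255, cv::THRESH_BINARY);  // drop shadow pixels (marked as 127)

    std::vector<std::vector<cv::Point>> contours;
    cv::findContours(fgMask, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

    std::vector<cv::Mat> detections;
    for (const std::vector<cv::Point>& c : contours) {
        if (cv::contourArea(c) < 500.0)                           // arbitrary minimum blob size
            continue;
        detections.push_back(image(cv::boundingRect(c)).clone()); // crop each detected region
    }
    return detections;
}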