// ---- Example #1 (score: 0) ----
//--------------------------------------------------------------
// Keyboard handler.
//  's' — toggle ONI recording: stop the recorder if one is running,
//        otherwise generate a file name and start a new recording.
//  'p' — toggle playback: switch to playing back the last recorded
//        file (only when a file exists, not recording, and live),
//        otherwise return to the live feed.
void testApp::keyPressed(int key){
	switch (key) {
		case 's':
			if (isRecording) {
				oniRecorder.stopRecord();
				isRecording = false;
			} else {
				setupRecording(generateFileName());
				oniRecorder.startRecord(currentFileName);
				isRecording = true;
			}
			// Single break for both branches; the original per-branch
			// breaks made the trailing one unreachable dead code.
			break;
		case 'p':
			if (currentFileName != "" && !isRecording && isLive) {
				setupPlayback(currentFileName);
				isLive = false;
			} else {
				isLive = true;
			}
			break;
		default:
			break;
	}
}
// ---- Example #2 (score: 0) ----
// One-time app initialization: starts in live (non-playback) mode,
// builds the control panel, and zeroes the point-cloud rotation on
// whichever user generator is active.
void testApp::setup() {
	isLive = true;
	isRecording = false;

	setupRecording();
	
	ofBackground(0, 0, 0);
	
	// Control panel — widgets are looked up elsewhere by their string
	// IDs (e.g. "isTracking", "zpos"), so the IDs must not change.
	panel.setup("Control Panel", ofGetWidth() - 315, 5, 300, 600);	
	panel.addPanel("Skeleton Tracking");	
	panel.addToggle("track skeletons", "isTracking", true);
	panel.addToggle("mask background", "isMasking", true);	
	panel.addToggle("mask cloud background", "cloudMasking", false);		
	panel.addSlider("cloud z", "zpos", 0, -5000, 5000, false);
	panel.addSlider("cloud rotation", "cloud_rotation", 0, 360, 0, false);	
	panel.addSlider("cloud point size", "cloud_point_size", 1, 1, 50, true);		
	panel.addSlider("z pixels per meter", "ppm", 20, 0, 100, false);
	panel.addToggle("show scene box", "show_scene_box", true);		
	
	// Reset point-cloud rotation on the generator matching the mode.
	// (isLive was set true above, so only the first branch runs here;
	// presumably kept symmetric for copy-paste safety.)
	if (isLive) {
		recordUser.setPointCloudRotation(0);
	}
	else {
		playUser.setPointCloudRotation(0);
	}	
	
}
//--------------------------------------------------------------
// One-time initialization: seeds the mode flags, sets the depth
// thresholds and filter factor, starts the live Kinect pipeline,
// and preloads the intro movie for the DISPLAY_PHOTO_STATE machine.
void testApp::setup() {

	isLive			= true;
	isTracking		= true;
	isTrackingHands	= false;
	isFiltering		= false;
	isRecording		= false;
	isCloud			= false;
	isCPBkgnd		= false;
	isMasking		= false;

	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 1000;

	// Smoothing factor for filtered tracking data.
	filterFactor = 0.1f;

	setupRecording();

	ofSetFullscreen(true);

	ofBackground(0, 0, 0);

	CurrentState = DISPLAY_PHOTO_STATE;
	ofSetVerticalSync(false);
	// Preload only; playback is triggered later by the state machine
	// (note the commented-out play() below). Indentation normalized —
	// the original line carried stray deep tabs.
	getReadyMovie.loadMovie("movies/duck and cover.mov");
	//getReadyMovie.play();
}
// ---- Example #4 (score: 0) ----
// Application bootstrap: scans the data directory for .oni recordings,
// opens either a recorded file or the live device, places the skeleton
// debug nodes into a humanoid bind pose, and positions the camera.
void testApp::setup()
{
	recordingDir.open(ofToDataPath("."));
	recordingDir.allowExt("oni");
	recordingDir.listDir();

	recording = false;

	setupGui();

	// Play back a recorded .oni when one is selected, otherwise go live.
	if (playID < recordingFilesCount)
		setupRecording(recordingFiles[playID]);
	else
		setupRecording();

	for (int node = 0; node < kNumTestNodes; node++)
	{
		testNodes[node].setPosition(-1000, -1000, -1000);
		testNodes[node].setScale(10);

		// Traditional humanoid bind pose: arm joints rotated about Z,
		// neck flipped, every other joint left at identity.
		switch (node)
		{
			case 2:
			case 4: // left arm
				testNodes[node].bindPoseOrientation.makeRotate(-90.0, ofVec3f(0.0f, 0.0f, 1.0f));
				break;
			case 3:
			case 5: // right arm
				testNodes[node].bindPoseOrientation.makeRotate(90.0, ofVec3f(0.0f, 0.0f, 1.0f));
				break;
			case 10: // neck
				testNodes[node].bindPoseOrientation.makeRotate(180.0, ofVec3f(0.0f, 0.0f, 1.0f));
				break;
			default:
				testNodes[node].bindPoseOrientation.set(0, 0, 0, 1);
				break;
		}
	}

	cam.setFov(57.8);
	cam.setFarClip(10000.f);
	const float camDistance = 1500;
	cam.orbit(180, 0, camDistance, ofVec3f(0, 0, camDistance));
}
// ---- Example #5 (score: 0) ----
//--------------------------------------------------------------
// Minimal initialization: default depth window, live Kinect pipeline,
// scratch module, black background.
void testApp::setup() {

	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 1000;
	setupRecording();
    scratch.setup();
	ofBackground(0, 0, 0);
        
}
// ---- Example #6 (score: 0) ----
//--------------------------------------------------------------
// Initialization for the unemployment-visualization sketch: seeds the
// Kinect tracking flags, starts the live pipeline, fills the (amplified)
// unemployment-rate table, and lays out the root/center/shoulder/hand
// anchor points relative to the window.
void testApp::setup() {
    
	isLive			= true;
	isTracking		= true;
	isTrackingHands	= true;
	isFiltering		= false;
	isRecording		= false;
	isCloud			= false;
	isCPBkgnd		= true;
	isMasking		= true;
    
	nearThreshold = 500;
	farThreshold  = 1000;
    
	filterFactor = 0.1f;
    
	setupRecording();
    
	ofBackground(0, 0, 0);
    
    // CURRENT ORDER: LO-NATIVE, HI-NATIVE, LO-IMMIGRANT, HI-IMMIGRANT
    // amplifier exaggerates the differences between the rates uniformly;
    // optional, and may be fractional.
    amplifier = 2;
    
    const int baseRates[4] = { 32, 6, 19, 6 };
    for (int i = 0; i < 4; i++) {
        unemploymentRates[i] = baseRates[i] * amplifier;
    }
    
    currentIndex = 0;
    
    // All anchors are laid out around the window midpoint.
    const int halfW = ofGetWidth() / 2;
    const int halfH = ofGetHeight() / 2;
    
    rootPoint.x = halfW - 100;
    rootPoint.y = 700;
    
    centerPoint.x = rootPoint.x;
    centerPoint.y = halfH;
    
    // Shoulder points are temporary until attached to Kinect.
    // Actual shoulder should probably be used to set hand positions,
    // but then should be fixed before the player starts to stretch
    // for them.
    leftShoulderPoint.x = halfW - 150;
    leftShoulderPoint.y = halfH - 80;
    rightShoulderPoint.x = halfW - 50;
    rightShoulderPoint.y = halfH - 80;
    
    // Neutral hand positions; would correspond to zero unemployment.
    leftHandPoint.x = halfW - 150;
    leftHandPoint.y = halfH;
    rightHandPoint.x = halfW - 50;
    rightHandPoint.y = halfH;
    
}
//--------------------------------------------------------------
// Motion-detector initialization: opens the hand-position log, resets
// the pluck/touch bookkeeping, configures tracking parameters, then
// chooses live capture or .oni playback based on the command-line
// filename before wiring up the detectors.
void motionDetector::setup() {

    // hand position tracking for Ryan
    myfile.open ("handPosition.txt");
//	img.allocate(640,480);

    countElapsedPlucks = 0;
    lastObjectTouched = -1;
    // Clear the last-touched timestamp for each of the 12 objects.
    for (int obj = 0; obj < 12; ++obj) {
        timeObjectWasLastTouched[obj] = 0;
    }

    // set background color
    ofBackground(0, 0, 0);

    isRecording = false;
    isTracking		= false;
    isTrackingHands	= true;
    isFiltering		= false;
    isMasking		= true;
    nearThreshold = 200;
    farThreshold  = 1000;
    filterFactor = 0.1f;

    // Zero out the pluck physics state.
    pluckParams.pos = 0;
    pluckParams.vel = 0;
    pluckParams.acc = 0;
    pluckParams.index = 0;

    // Live when no .oni file was supplied on the command line,
    // otherwise play the supplied recording back.
    isLive = (filename == "");
    if (isLive)
    {
        cout << "no .oni filename detected in cmd-line, running setupRecording() \n";
        setupRecording();
    }
    else
    {
        cout << ".oni filename detected in cmd-line, running setupPlayback(filename) \n";
        setupPlayback(filename);
    }

    setupDetectors();

}
// ---- Example #8 (score: 0) ----
//--------------------------------------------------------------
// Initialization: seed mode flags, allocate the luminance scratch
// texture, and start the live Kinect pipeline.
void testApp::setup() {
	
	
	
	isLive		= true;
	isTracking	= false;
	isRecording = false;
	isCloud		= false;
	isCPBkgnd	= true;
	isMasking   = true;
	
	//TM.setup();	

	ofBackground(0, 0, 0);
	
	// 640x480 single-channel texture; allocated before the capture
	// pipeline starts.
	temp.allocate(640,480, GL_LUMINANCE);
	setupRecording();
		
}
// ---- Example #9 (score: 0) ----
//--------------------------------------------------------------
// Initialization: seed all mode flags, set the depth window and
// smoothing factor, and start the live Kinect pipeline.
void ofApp::setup() {

	isLive			= true;
	isTracking		= false;
	isTrackingHands	= true;
	isFiltering		= false;
	isRecording		= false;
	isCloud			= false;
	isCPBkgnd		= true;
	isMasking		= true;

	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 1000;

	filterFactor = 0.1f;

	setupRecording();

	ofBackground(0, 0, 0);

}
// ---- Example #10 (score: 0) ----
// File: testApp.cpp — Project: stpn/Turing
//--------------------------------------------------------------
// Initialization: seed tracking flags, start the Kinect pipeline,
// then set up the face tracker, recorder, and a 320x240 FBO that
// frames are drawn into before being saved out as pixels.
void testApp::setup() {

	isLive			= true;
	isTracking		= true;
	isTrackingHands	= true;
	isFiltering		= false;
	isRecording		= false;
	isCloud			= false;
	isCPBkgnd		= true;
	isMasking		= true;

	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 1000;

	filterFactor = 0.1f;

	setupRecording();

	ofBackground(0, 0, 0);

    tracker.setup();
    
    recorder.setup();
    // FBO and matching pixel buffer share the same 320x240 RGB layout.
    fboSaver.allocate(320, 240, GL_RGB);
    fboPixels.allocate(320, 240, OF_IMAGE_COLOR);
//    ofEnableAlphaBlending();
//    alphaPNG.loadImage("1.png"); //test image
    // Clear the FBO to transparent white once so stale GPU memory
    // never shows up in the first saved frame.
    fboSaver.begin();
    ofClear(255,255,255,0);
    fboSaver.end();

    
    
    //    cam.listDevices();
    //    
    //    cam.setDeviceID(3);
    // 	cam.initGrabber(640, 480);


}
// ---- Example #11 (score: 0) ----
//--------------------------------------------------------------
// Initialization: seed tracking flags, start the Kinect pipeline,
// open the MIDI output port, zero the MIDI state, and configure
// optical flow on a half-resolution depth image.
void testApp::setup() {

	isLive			= true;
	isTracking		= false;
	isTrackingHands	= true;
	isFiltering		= false;
	isRecording		= false;
	isCloud			= false;
	isCPBkgnd		= true;
	isMasking		= true;
    isPlaying       = false;

	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 1000;

	filterFactor = 0.1f;

	setupRecording();

	ofBackground(0, 0, 0);
    
    //What midi ports are available?
    midiOut.listPorts(); // via instance
    midiOut.openPort("passthrough_in");	// by name
    
    // MIDI channel assignments and zeroed controller state.
    channel = 1;
    synthChan = 2;
    effectsChan = 3;
	currentPgm = 0;
	note = 0;
	velocity = 0;
	pan = 0;
	bend = 0;
	touch = 0;
	polytouch = 0;
    
    // Optical flow over a half-resolution depth frame. The original
    // wrapped the rectangle in a second, redundant ofRectangle copy.
    opFlow.setup(ofRectangle(0, 0, recordDepth.getWidth()/2, recordDepth.getHeight()/2));
    opFlow.setOpticalFlowBlur(15);
}
// ---- Example #12 (score: 0) ----
//--------------------------------------------------------------
// Initialization: most mode flags are intentionally disabled
// (commented out); only the baseline flag and depth window are set
// before starting the live Kinect pipeline.
void testApp::setup() {
/*	isLive			= false;
	isTracking		= false;
	isTrackingHands	= false;
	isFiltering		= false;
	isRecording		= false;
	isCloud			= false;
	isCPBkgnd		= true;
	isMasking		= false;
*/
	// No depth baseline captured yet; presumably set later at runtime.
	isBaselineSet = false;

	nearThreshold = 500;
	farThreshold  = 1000;

//	filterFactor = 0.1f;

	setupRecording();
	
	ofBackground(0, 0, 0);

}
// ---- Example #13 (score: 0) ----
//--------------------------------------------------------------
// Initialization: seed mode flags, set the depth window and smoothing
// factor, start the live Kinect pipeline, and build the GUI.
void testApp::setup() {

	isLive			= true;
	isTracking		= false;
	isTrackingHands	= true;
	isFiltering		= false;
	isRecording		= false;
	isCloud			= false;
	isCPBkgnd		= true;
	isMasking		= true;

	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 1000;

	filterFactor = 0.1f;

	setupRecording();
    
    //firm
    setGUI(); 

}
//--------------------------------------------------------------
// Kinect-controller initialization: seed tracking flags and overlay
// toggles, set the depth window, start the live pipeline, and load
// the display font.
void kinectController::setup() 
{
	isTracking		= true;
	isFiltering		= false;
	isRecording		= false;
	isCloud			= false;
	isCPBkgnd		= true;
	isMasking		= true;

	// Debug overlays off by default.
    showFullBodyTracker = false;
    showHands = false;
    
	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 1000;
    
	filterFactor = 0.1f;
    
	setupRecording();
    
	ofBackground(0, 0, 0);
    
    myFont.loadFont("frabk.ttf", 32);
}
// ---- Example #15 (score: 0) ----
//--------------------------------------------------------------
// Initialization for the underwater hand-masking sketch: seeds the
// tracking/display flags, starts the Kinect pipeline, builds crop/depth
// sliders in a custom GUI, allocates the OpenCV working images, loads
// the background image, and binds the two compositing shader textures.
void testApp::setup() {
	
	isLive			= true;
	isTracking		= true;
	isTrackingHands	= false;
	isFiltering		= false;
	isRecording		= false;
	isCPBkgnd		= true;
	isMasking		= true;
	masks			=true;
	drawRef			=true;
	
	smooth = true;
	full = false;
	
	showHandsImage=true;
	
	ofEnableAlphaBlending();
	
	
	ofSetVerticalSync(false);
	
	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 2000;
	
	filterFactor = 0.1f;
	
	setupRecording();
	
	ofBackground(0, 0, 0);
	
	//vidGrabber.setVerbose(true);
	//vidGrabber.initGrabber(640,480);
	
	
	// Crop offsets and the 640x480 working frame geometry.
	leftCrop =0.0;
	bottomCrop =0.0;
	w=640;
	h=480;
	x=0;
	y=0;
	
	ratio = float(w)/h;
	
	
	handDepth=10;
	
	//---------------------------GUI----------------------------------------------------------------
	// NOTE(review): raw `new` allocations below — presumably the GUI
	// takes ownership via addControl; confirm no leak on shutdown.
	
	gui = new MyGUI();
	MyPanel* settingsPanel = new MyPanel("Settings", 0, 500);
	gui->addControl( settingsPanel );
	
	// Sliders write directly into the member variables via pointer.
	MySlider* leftCropSlider = new MySlider("Left Crop", 10, 30, 200, 20, &leftCrop, 0, 200);
	settingsPanel->addControl( leftCropSlider );
	
	MySlider* bottomCropSlider = new MySlider("Bottom Crop", 10, 60, 200, 20, &bottomCrop, 0, 200);
	settingsPanel->addControl( bottomCropSlider );
	
	MySlider* handDepthSlider = new MySlider("Hand Depth", 10, 90, 200, 20, &handDepth, 0, 100);
	settingsPanel->addControl( handDepthSlider );
	
	//----------------------------------------------------------------------------------------------
	
	
	
	// OpenCV working images at camera resolution.
	CVcolorImg.allocate(640,480);
	CVgrayImage.allocate(640,480);
	handsImage.allocate(640,480);
	handsImage.setUseTexture(true);
	CVgrayImage.setUseTexture(true);
	//CVgrayDepth.allocate(640,480);
	//CVgrayDepth.setUseTexture(true);

	 //background.loadImage("background.png");
	background.loadImage("underwater.jpg");
	//allUserMasks.allocate(recordUser.getWidth(), recordUser.getHeight(), OF_IMAGE_GRAYSCALE);
	colorImg.allocate(recordImage.getWidth(), recordImage.getHeight(), OF_IMAGE_COLOR);
	
	// Bind the color and mask textures to the compositing shader once;
	// textures must already be allocated at this point.
	maskShader.load("composite.vert", "composite.frag");
	maskShader.begin();
	maskShader.setUniformTexture("Tex0", colorImg.getTextureReference(), 0);
	maskShader.setUniformTexture("Tex1", CVgrayImage.getTextureReference(), 1);
	maskShader.end();
	
}
// ---- Example #16 (score: 0) ----
//--------------------------------------------------------------
// Keyboard dispatch for the vision app: fullscreen/background/threshold
// controls, algorithm and cropping selection via digit keys, live-video
// vs movie-file source switching, recording toggle, and F1-F5 output
// view selection.
void testApp::keyPressed  (int key){

	switch (key){
		case ' ':
			bFullScreen = !bFullScreen;
			break;
		case 'b':
			bLearnBakground = true;
			break;
		// '+'/'-' nudge the threshold, clamped to [0, 255].
		case '+':
			threshold ++;
			if (threshold > 255) threshold = 255;
			break;
		case '-':
			threshold --;
			if (threshold < 0) threshold = 0;
			break;
        // Algorithm '3' gets its own, lower threshold; all other
        // algorithm keys share the 80 default.
        case '3':   threshold = 10;
                    algorithm = key-48; // character '1' is dec number 49
                    break;
        case '0':
		case '1':
		case '2':
		case '4':
		case '5':
                    threshold = 80;
                    algorithm = key-48; // character '1' is dec number 49
                    break;
        // Cropping modes store the raw key code, not key-48.
        case '6':
        case '7':
        case '8':   cropping = key;
                    break;
        case 'Z':
        case 'z':   bZoomTarget = !bZoomTarget;
                    break;
        case 's':   saveFrame = true;
                    break;

        case 'd':   settings.setDefaults();
                    break;
        // 'l' swaps between live camera and movie playback, closing
        // the currently-active source first.
        case 'l':   bLiveVideo = !bLiveVideo;
                    if (bLiveVideo) {
                        vidPlayer.close();
                        setupLiveVideo();
                    }
                    else {
                        vidGrabber.close();
                        loadMovie();
                    }
                    // Brief pause — presumably to let the device/file
                    // settle before the next frame; confirm necessity.
                    ofSleepMillis(500);
                    break;

        case 'a':   bMovieAudio = !bMovieAudio;
                    if (!bMovieAudio) vidPlayer.setVolume(0);
                    else vidPlayer.setVolume(1);
                    break;
        case 'h':   hFlip = !hFlip; break;
        case 'v':   vFlip = !vFlip; break;

        // 'r' toggles recording: start on first press, stop on second.
        case 'r':   if (!bRecording) {
                        setupRecording();
                    }
                    else  {
                        recorder.stop();
                        cout << "STOP RECORDING"<< endl;
                    }
                    bRecording = ! bRecording;
                    break;

        case 'g': bShowGui = !bShowGui; break;
        // 'o' forces movie-file mode regardless of the current source.
        case 'o': bLiveVideo = false;
                  vidGrabber.close();
                  loadMovie();
                  break;
        case 'w': openOutWin(); break;
        case 'i': // initialize connection
                  /*for(int i = 0; i < NUM_CAMERAS; i++) {
                        ofRemoveListener(ipGrabber[i]->videoResized, this, &testApp::videoResized);
                        ofxSharedIpVideoGrabber c( new ofxIpVideoGrabber());
                        IPCameraDef& cam = getNextCamera();
                        c->setUsername(cam.username);
                        c->setPassword(cam.password);
                        URI uri(cam.url);
                        c->setURI(uri);
                        c->connect();
                            ipGrabber[i] = c;
                  }*/
            break;
        // F1-F5 select the output view mode.
        case OF_KEY_F1: selOutput =  OUTPUT_IMAGE;
            break;
        case OF_KEY_F2: selOutput =  ANALYSIS_WINDOW;
            break;
        case OF_KEY_F3: selOutput =  INPUT_IMAGE;
            break;
        case OF_KEY_F4: selOutput =  FOUR_WINDOWS;
            break;
        case OF_KEY_F5: selOutput =  BG_IMAGE;
            break;

        default:
            break;
     }
}
//--------------------------------------------------------------
// Initialization for the AR electric-field sketch: sets up tracking
// flags and the Kinect pipeline, allocates OpenCV images, configures
// ARToolkitPlus marker tracking with auto-thresholding, and seeds the
// field-line simulation (hand charges, ground charge, random lines).
void testApp::setup() {
    
    //ofSetDataPathRoot("./");

    //glEnable (GL_BLEND);
    //glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    
    ofEnableAlphaBlending();
    
    //Define camera width and height
    width = 640; //ofGetWidth();
	height = 480; //ofGetHeight();
    
	isLive			= true;
	isTracking		= true;
	isCloud			= false;
	isCPBkgnd		= true;
	isMasking		= true;
    isCalibrating   = true;
    isDebug         = false;

	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold = 500;
	farThreshold  = 1000;

	filterFactor = 0.5f;

	setupRecording();

    colorImage.allocate(width, height);
	grayImage.allocate(width, height);
	grayThres.allocate(width, height);
	
	// This uses the default camera calibration and marker file
	artk.setup(width, height);

	// ARTK+ does the thresholding for us
    threshold=70;
    artk.activateAutoThreshold(true);
    
    //SETUP EFIELD AND CHARGES
	//------------------------
	dbFlag = false;
	cCote = 4;
	rotY = 0.0;
	incRotY = 0.01;
	distZ = 100;
	
	//set Line Color to be WHITE
	lineColor.r=200;
	lineColor.g=255;
	lineColor.b=211;
	lineColor.a=86;
    

	//Setup Charges
    
    //Hands
	charge[0].set(width/2,height/2,0,1000.0);
	charge[1].set(width/2,height/2,0.0,1000.0);

	//ground
	// NOTE(review): only charge indices 0, 1, and 6 are initialized
	// here — confirm indices 2-5 are set elsewhere before use.
	charge[6].set(0,-1000,0,400.0);
	
	// Scatter the field lines randomly across the frame (z is always 0).
	for (int i = 0; i < nbPLignes; i++)
	{
		pLigne[i].set(ofRandom(0,width),ofRandom(0,height),(ofRandom(0,0)),1);
	}
    
	ofBackground(0, 0, 0);
    
    marker0.set(0,0,0);
    marker1.set(0,0,0);

}
// ---- Example #18 (score: 0) ----
//--------------------------------------------------------------
// Device bring-up for the OpenNI tracking pipeline. Stores frame
// geometry and IR/LED options, starts the recording pipeline, then —
// only if the device connected — allocates every downstream buffer:
// OpenCV images, contour-finder scratch vectors, Kalman filters, blob
// tracking state, optical-flow grids, trigger areas, and the default
// GUI/OSC parameter values.
// NOTE(review): the raw new/new[] allocations below have no matching
// delete in this block — confirm they are freed in the destructor.
void openniTracking::setupDevice(int _w, int _h,bool _ir,int _ledState){

	_width	 = _w;
	_height  = _h;
	useIR	 = _ir;
	ledState = _ledState;

	useKinect		= true;
	isTrackingHands = true;

	// Depth window — presumably millimeters; TODO confirm units.
	nearThreshold	= 500;
	farThreshold	= 1000;
	filterFactor	= 0.1f;

	// Two hands per tracked user.
	maxHands		  = NUM_KINECT_USERS*2;
	_osc_hands_Pos = new ofPoint[maxHands];

	setupRecording();

    // Everything below requires a live device connection.
    if(isOniConnected){

        camTexture.allocate(_width,_height,GL_RGB);
        //////////////////////////////////////////////
        // opencv setup
        cleanImage.allocate(_width, _height);

        //////////////////////////////////////////////
        // set motion detection vars
        grayPrev.allocate(_width,_height);
        grayNow.allocate(_width,_height);
        motionImg.allocate(_width,_height);

        frameCounter	 = 0;
        numPixelsChanged = 0;
        mThreshold		 = 0;
        //////////////////////////////////////////////

        //////////////////////////////////////////////
        // contour finder
        // Per-contour scratch storage: raw ("Reg"), smoothed, and
        // simplified polylines, plus geometry and the smoothed/OSC
        // copies sent downstream.
        contourReg				= new vector<ofVec2f>[MAX_NUM_CONTOURS_TO_FIND];
        contourSmooth			= new vector<ofVec2f>[MAX_NUM_CONTOURS_TO_FIND];
        contourSimple			= new vector<ofVec2f>[MAX_NUM_CONTOURS_TO_FIND];
        geomLines				= new vector<ofVec4f>[MAX_NUM_CONTOURS_TO_FIND];
        _s_blobGeom				= new vector<ofVec4f>[MAX_NUM_CONTOURS_TO_FIND];
        _osc_blobGeom			= new vector<ofVec4f>[MAX_NUM_CONTOURS_TO_FIND];
        _s_contourSmooth		= new vector<ofVec2f>[MAX_NUM_CONTOURS_TO_FIND];
        _osc_contourSmooth		= new vector<ofVec2f>[MAX_NUM_CONTOURS_TO_FIND];
        _s_contourSimple		= new vector<ofVec2f>[MAX_NUM_CONTOURS_TO_FIND];
        _osc_contourSimple		= new vector<ofVec2f>[MAX_NUM_CONTOURS_TO_FIND];

        box						= new CvBox2D32f[MAX_NUM_CONTOURS_TO_FIND];
        blobAngle				= new float[MAX_NUM_CONTOURS_TO_FIND];
        _kalman_blobInfo		= new CvBox2D32f[MAX_NUM_CONTOURS_TO_FIND];
        _s_blobInfo				= new CvBox2D32f[MAX_NUM_CONTOURS_TO_FIND];
        _osc_blobInfo			= new CvBox2D32f[MAX_NUM_CONTOURS_TO_FIND];

        // One Kalman filter per tracked quantity (x, y, angle, w, h)
        // for each potential blob.
        for(unsigned int i=0;i<MAX_NUM_CONTOURS_TO_FIND;i++){
            kBlobInfoX[i]		= new ofxCvKalman(0.0f);
            kBlobInfoY[i]		= new ofxCvKalman(0.0f);
            kBlobInfoAngle[i]	= new ofxCvKalman(0.0f);
            kBlobInfoW[i]		= new ofxCvKalman(0.0f);
            kBlobInfoH[i]		= new ofxCvKalman(0.0f);
        }

        smoothPct = 0.13f;
        tolerance = 20.0f;
        //////////////////////////////////////////////

        //////////////////////////////////////////////
        // advanced blob tracking (blob order)
        blobTracker.setListener(this);
        blobsOrder = new int[MAX_USERS_HARDLIMIT];
        topLeftX = 1;
        topLeftY = 1;
        maxUsers = MAX_USERS_HARDLIMIT;
        lowRightX = _width;
        lowRightY = _height;
        userOffsetX = 0;
        userOffsetY = 0;
        moodSpike = 0.25f;
        nonMoodFrames = 3;
        currNonMoodFrame = 0;
        csState = CO_SELNONE;
        // All blobs are marked as dead initially
        for (unsigned int i = 0; i < MAX_USERS_HARDLIMIT; i++){
            usersTracking[i] = B_DEAD;
        }
        currAdjustment = AJ_THRESH;
        //////////////////////////////////////////////

        //////////////////////////////////////////////
        // optical flow
        // Flow is sampled on a coarse grid; one velocity entry per cell.
        opticalFlow.allocate(_width,_height);
        opticalFlowXGrid = (int)(_width/OPTICAL_FLOW_COLS_STEP);
        opticalFlowYGrid = (int)(_height/OPTICAL_FLOW_ROWS_STEP);

        _s_opfVel		= new ofVec4f[opticalFlowXGrid*opticalFlowYGrid];
        _osc_opfVel		= new ofVec4f[opticalFlowXGrid*opticalFlowYGrid];
        //////////////////////////////////////////////

        //////////////////////////////////////////////
        // matrix areas for motion trigger
        // flopX/flopY: screen offset where the trigger-area grid is drawn.
        float	flopX = 30;
        float	flopY = 90;

        triggerAreas.setup(flopX,flopY,_width,_height,TRIGGER_AREAS_NUM);
        triggerAreas.loadSettings("settings/openni/triggerAreas.xml");
        triggerState = new bool[TRIGGER_AREAS_NUM];
        for(unsigned int i=0;i<TRIGGER_AREAS_NUM;i++){
            triggerState[i] = false;
        }
        lastAreaON = -1;
        actualArea = -1;
        lastTime = 0;
        silencePeriod = 0;
        //////////////////////////////////////////////

        //////////////////////////////////////////////
        // dummy gui vars init
        ciBlur = 1;
        ciErode = 1;
        ciDilate = 1;

        minBlobArea = 20;
        maxBlobArea = 100000;
        cfDetail = 1;

        onHorizon = 20;
        offHorizon = 50;

        // All heavy analysis stages start disabled; the GUI/OSC layer
        // presumably toggles them at runtime.
        computeContourFinder = false;
        computeContourGeometry = false;
        computeOpticalFlow = false;
        computeTriggerAreas = false;

        useKalmanFilter = false;
        _smoothingFactor = 0.95f;

        saveAllSettings = false;

        // OSC flags
        sendOsc_BD = false;
        sendOsc_CF = false;
        sendOsc_CG = false;
        sendOsc_OF = false;
        sendOsc_TA = false;
        sendOsc_HT = false;
        sendOsc_HW = true;
        //////////////////////////////////////////////
    }

}