void update() {
#ifdef USE_AUDIO
    // Speaker sampling code
    speakerFbo.begin();
    renderScene(shader, speakerXyzMap, speakerConfidenceMap);
    speakerFbo.end();

    // Read back the fbo, and average it on the CPU
    speakerFbo.readToPixels(speakerPixels);
    speakerPixels.setImageType(OF_IMAGE_GRAYSCALE);
    ofxOscMessage brightnessMsg;
    brightnessMsg.setAddress("/audio/brightness");
    float* pix = speakerPixels.getData();
    for(int i = 0; i < n_speakers; i++) {
        float avg = 0;
        for(int j = 0; j < n_samples; j++) {
            avg += *pix++;
        }
        avg /= n_samples;
        brightnessMsg.addFloatArg(avg);
    }
    oscSender.sendMessage(brightnessMsg);

    float elapsedTime = ofGetElapsedTimef();

    // copied from shader --- 8< ---
    float t = elapsedTime / 30.; // duration of each stage
    float stage = floor(t);      // index of current stage
    float i = t - stage;         // progress in current stage
    // copied from shader --- 8< ---

    if(stage != previousStage) {
        ofxOscMessage msg;
        msg.setAddress("/audio/scene_change_event");
        msg.addIntArg(stage == 0 ? 0 : 2);
        oscSender.sendMessage(msg);
    }
    previousStage = stage;

    if(stage == 0) {
        float lighthouseAngle = ofGetElapsedTimef() / TWO_PI;
        lighthouseAngle += 0; // set offset here
        ofxOscMessage msg;
        msg.setAddress("/audio/lighthouse_angle");
        msg.addFloatArg(fmodf(lighthouseAngle, 1));
        oscSender.sendMessage(msg);
    }
#endif
}
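The audio application that consumes these messages is not part of this listing. As a rough sketch, assuming an ofxOsc receiver listening on the same port used in config() below (7777), the per-speaker averages could be unpacked like this; the receiver and variable names are invented for illustration:

// Hypothetical audio-side receiver (not from the original project):
// unpacks the floats sent as /audio/brightness above, one per speaker.
#include "ofxOsc.h"
#include <vector>

ofxOscReceiver audioReceiver;        // audioReceiver.setup(7777); in setup()
std::vector<float> brightness;       // latest per-speaker averages

void checkOsc() {
    while(audioReceiver.hasWaitingMessages()) {
        ofxOscMessage m;
        audioReceiver.getNextMessage(&m);
        if(m.getAddress() == "/audio/brightness") {
            brightness.resize(m.getNumArgs());
            for(int i = 0; i < (int) m.getNumArgs(); i++) {
                brightness[i] = m.getArgAsFloat(i); // average for speaker i
            }
        } else if(m.getAddress() == "/audio/scene_change_event") {
            // 0 or 2, per the sender above: react to the scene change here
        }
    }
}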
void setup() {
    ofSetDataPathRoot("../../../../../SharedData/");
    ofSetVerticalSync(true);
    // ofSetLogLevel(OF_LOG_VERBOSE);

#ifdef USE_VIDEO
    video.loadMovie("videos/melica.mp4");
    video.play();
#else
    video.setup();
#ifdef USE_EDSDK
    video.setDeviceType(EDSDK_MKII);
#endif
#endif

    ofFbo::Settings settings;
    settings.width = video.getWidth();
    settings.height = video.getHeight();
    settings.useDepth = false;
    buffer.allocate(settings);

    ofSetBackgroundAuto(false);

    contours.getTracker().setPersistence(100);
    contours.getTracker().setMaximumDistance(100);

    setupGui();
    osc.setup("klaus.local", 7400);
}
void keyPressed(int key) {
    server->keyPressed(key);
    if(key > '0' && key < '9') {
        ofxOscMessage msg;
        msg.setAddress("/audio/scene_change_event");
        msg.addIntArg(key - '0');
        oscSender.sendMessage(msg);
    }
}
void sendOsc() {
    ofxOscMessage msg;
    msg.setAddress("/motors");
    msg.addFloatArg(local.nwLength);
    msg.addFloatArg(local.nwSpeed);
    msg.addFloatArg(local.neLength);
    msg.addFloatArg(local.neSpeed);
    msg.addFloatArg(local.seLength);
    msg.addFloatArg(local.seSpeed);
    msg.addFloatArg(local.swLength);
    msg.addFloatArg(local.swSpeed);
    oscSend.sendMessage(msg);
}
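The remote end of this message is not shown. A minimal sketch of how the eight float arguments could be unpacked, assuming ofxOsc on the receiving side; the struct and function names are assumptions for illustration:

// Hypothetical receiver-side sketch: unpacks the /motors message built
// by sendOsc() above. Arguments arrive as length/speed pairs in
// NW, NE, SE, SW order.
#include "ofxOsc.h"

struct MotorState {
    float nwLength, nwSpeed;
    float neLength, neSpeed;
    float seLength, seSpeed;
    float swLength, swSpeed;
};

bool handleMotorsMessage(const ofxOscMessage& msg, MotorState& out) {
    if(msg.getAddress() != "/motors" || msg.getNumArgs() < 8) {
        return false;
    }
    out.nwLength = msg.getArgAsFloat(0); out.nwSpeed = msg.getArgAsFloat(1);
    out.neLength = msg.getArgAsFloat(2); out.neSpeed = msg.getArgAsFloat(3);
    out.seLength = msg.getArgAsFloat(4); out.seSpeed = msg.getArgAsFloat(5);
    out.swLength = msg.getArgAsFloat(6); out.swSpeed = msg.getArgAsFloat(7);
    return true;
}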
int main() {
    tuioSender.setup("127.0.0.1", 3333);

    // Create a sample listener and controller
    AppListener listener;
    Controller controller;

    // Have the sample listener receive events from the controller
    controller.addListener(listener);

    // Keep this process running until Enter is pressed
    std::cout << "Press Enter to quit..." << std::endl;
    std::cin.get();

    // Remove the sample listener when done
    controller.removeListener(listener);

    return 0;
}
void updateMotion() {
    // get overall motion
    // motion.update(graySmall);
    // motionValue = motion.getMean();
    motionRunning.setLearningTime(motionLearningTime);
    motionRunning.update(graySmall, thresholdedRunning);
    motionValue = motionRunning.getPresence();

    ofxOscMessage msg;
    msg.setAddress("/motion");
    float t = ofGetElapsedTimef();
    msg.addFloatArg(motionValue);
    osc.sendMessage(msg);

    if(motionValue > smoothedMotionValue) {
        smoothedMotionValue = ofLerp(motionValue, smoothedMotionValue, motionSmoothingUp);
    } else {
        smoothedMotionValue = ofLerp(motionValue, smoothedMotionValue, motionSmoothingDown);
    }
}
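Note the argument order of ofLerp(start, stop, amt): here the smoothing factor is the fraction of the old smoothed value that is kept, so a factor near 0 tracks a rising reading almost immediately while a factor near 1 releases slowly. A standalone sketch of the same idea, with made-up factors rather than the project's actual settings:

// Standalone sketch of the asymmetric smoothing used above. The values
// 0.1f and 0.99f are illustrative assumptions only.
#include "ofMain.h"

float followEnvelope(float current, float smoothed,
                     float smoothingUp = 0.1f, float smoothingDown = 0.99f) {
    // ofLerp(a, b, t) returns a + (b - a) * t, so a factor near 0 jumps to
    // the new reading while a factor near 1 holds onto the previous value.
    if(current > smoothed) {
        return ofLerp(current, smoothed, smoothingUp);   // rising: respond quickly
    }
    return ofLerp(current, smoothed, smoothingDown);     // falling: decay slowly
}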
void config(int id, shared_ptr<ServerApp> server) {
    this->id = id;
    this->server = server;

    ofSetVerticalSync(true);
    ofDisableAntiAliasing();
    ofBackground(0);

    shader.loadAuto("../../../SharedData/shader/shader");
    xyzMap.loadAuto("../../../SharedData/xyzMap-" + ofToString(id) + ".exr");
    xyzMap.getTexture().setTextureMinMagFilter(GL_NEAREST, GL_NEAREST);
    confidenceMap.loadAuto("../../../SharedData/confidenceMap-" + ofToString(id) + ".exr");
    confidenceMap.getTexture().setTextureMinMagFilter(GL_NEAREST, GL_NEAREST);
    ofLog() << xyzMap.getWidth() << " x " << xyzMap.getHeight();
    ofLog() << confidenceMap.getWidth() << " x " << confidenceMap.getHeight();

#ifdef USE_AUDIO
    oscSender.setup("localhost", 7777);
    setupSpeakers();
#endif
}
void setup() {
    ofBackground(255);
    ofSetFrameRate(60);

    config.load("config.xml");
    defaultLength = config.getFloatValue("cable/length/default");
    minLength = config.getFloatValue("cable/length/min");
    maxLength = config.getFloatValue("cable/length/max");
    host = config.getValue("osc/host");
    sendPort = config.getIntValue("osc/sendPort");
    receivePort = config.getIntValue("osc/receive");

    oscSend.setup(host, sendPort);
    oscReceive.setup(receivePort);

    local.setup("Local");
    remote.setup("Remote");

    zeros.setup("Zeros");
    zeros.add(nwZero.setup("NW Zero"));
    zeros.add(neZero.setup("NE Zero"));
    zeros.add(seZero.setup("SE Zero"));
    zeros.add(swZero.setup("SW Zero"));
    nwZero.addListener(this, &ofApp::zeroNW);
    neZero.addListener(this, &ofApp::zeroNE);
    seZero.addListener(this, &ofApp::zeroSE);
    swZero.addListener(this, &ofApp::zeroSW);

    local.gui.loadFromFile("settings.xml");
    local.gui.setPosition(10, 10);
    zeros.setPosition(10, 200);
    remote.gui.setPosition(10, 310);
}
int main(int argc, char **argv) {
    sender.setup(HOST, PORT);

    XnStatus rc = XN_STATUS_OK;
    rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
    CHECK_RC(rc, "InitFromXml");

    rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(rc, "Find depth generator");
    rc = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    CHECK_RC(rc, "Find user generator");

    XnCallbackHandle h;
    g_UserGenerator.RegisterUserCallbacks(NewUser, LostUser, NULL, h);
    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    XnCallbackHandle hCalib;
    XnCallbackHandle hPose;
    g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(&CalibrationStart, &CalibrationEnd, NULL, hCalib);
    g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(&PoseDetected, NULL, NULL, hPose);

    rc = g_Context.StartGeneratingAll();
    CHECK_RC(rc, "StartGenerating");

    xn::DepthMetaData depthMD;
    g_DepthGenerator.GetMetaData(depthMD);
    fXRes = depthMD.XRes();
    fYRes = depthMD.YRes();
    fMaxDepth = depthMD.ZRes();

    nCutOffMin = 0;
    nCutOffMax = fMaxDepth;

    nPointerX = fXRes / 2;
    nPointerY = fYRes / 2;
    nPointerDiffX = (WIN_SIZE_X / fXRes / 2) + 1;
    nPointerDiffY = (WIN_SIZE_Y / fYRes / 2) + 1;

    if (argc == 2) {
        nCutOffMax = atol(argv[1]);
    }

    srand(XnUInt32(time(NULL)));

    glutInit(&argc, argv);
    glutInitDisplayString("stencil depth>16 double rgb samples=0");
    glutInitWindowSize(WIN_SIZE_X, WIN_SIZE_Y);
    glutCreateWindow("Prime Sense Stick Figure Sample");
    glutSetCursor(GLUT_CURSOR_NONE);

    init_opengl();

    glut_helpers_initialize();

    cb.passive_motion_function = MotionCallback;
    cb.keyboard_function = key;

    camera.configure_buttons(0);
    camera.set_camera_mode(true);
    camera.set_parent_rotation(&camera.trackball.r);
    camera.enable();

    object.configure_buttons(1);
    object.translator.t[2] = -1;
    object.translator.scale *= .1f;
    object.trackball.r = rotationf(vec3f(2.0, 0.01, 0.01), to_radians(180));
    object.set_parent_rotation(&camera.trackball.r);
    object.disable();

    light.configure_buttons(0);
    light.translator.t = vec3f(.5, .5, -1);
    light.set_parent_rotation(&camera.trackball.r);
    light.disable();

    reshaper.zNear = 1;
    reshaper.zFar = 100;

    // make sure all interactors get glut events
    glut_add_interactor(&cb);
    glut_add_interactor(&camera);
    glut_add_interactor(&reshaper);
    glut_add_interactor(&light);
    glut_add_interactor(&object);

    camera.translator.t = vec3f(0, 0, 0);
    camera.trackball.r = rotationf(vec3f(0, 0, 0), to_radians(0));

    light.translator.t = vec3f(0, 1.13, -2.41);
    light.trackball.r = rotationf(vec3f(0.6038, -0.1955, -0.4391), to_radians(102));

    glutIdleFunc(idle);
    glutDisplayFunc(display);

    // Per frame code is in display
    glutMainLoop();

    return 0;
}
void AppListener::onFrame(const Controller& controller) {
    // Get the most recent frame and report some basic information
    const Frame frame = controller.frame();
    ::Sleep(1);
    const HandList& hands = frame.hands();
    const FingerList& fingers = frame.fingers();
    const ToolList& tools = frame.tools();
    const GestureList& gestures = frame.gestures();
    if (hands.empty() && fingers.empty() && tools.empty() && gestures.empty()) return;

#if 0
    std::cout << "Frame id: " << frame.id()
        << ", timestamp: " << frame.timestamp()
        << " hands: " << hands.count()
        << " fingers: " << fingers.count()
        << " tools: " << tools.count()
        << " gestures: " << gestures.count() << std::endl;
#endif

    ofxOscBundle bundle;

    ofxOscMessage alive;
    {
        alive.setAddress("/tuio/2Dcur");
        alive.addStringArg("alive");
    }

    ofxOscMessage fseq;
    {
        fseq.setAddress("/tuio/2Dcur");
        fseq.addStringArg("fseq");
        fseq.addIntArg(frame.id());
    }

    if (!hands.empty()) {
        // Get the first hand
        const Hand& hand = hands[0];

        // Check if the hand has any fingers
        const FingerList& fingers = hand.fingers();
        if (!fingers.empty()) {
            for (int i = 0; i < fingers.count(); ++i) {
                const Vector& tipPos = fingers[i].tipPosition();
                const Vector& tipVel = fingers[i].tipVelocity();
                {
#ifdef INIT_BBOX
                    // max/min bbox
                    bboxMax.x = std::max<float>(bboxMax.x, tipPos.x);
                    bboxMax.y = std::max<float>(bboxMax.y, tipPos.y);
                    bboxMax.z = std::max<float>(bboxMax.z, tipPos.z);
                    bboxMin.x = std::min<float>(bboxMin.x, tipPos.x);
                    bboxMin.y = std::min<float>(bboxMin.y, tipPos.y);
                    bboxMin.z = std::min<float>(bboxMin.z, tipPos.z);
#endif
                    Vector tuioPos;
                    tuioPos.x = normalize(tipPos.x, bboxMin.x, bboxMax.x);
                    tuioPos.y = normalize(tipPos.y, bboxMax.y, bboxMin.y);
                    tuioPos.z = normalize(tipPos.z, bboxMin.z, bboxMax.z);

                    // tuio
                    {
                        ofxOscMessage m;
                        m.setAddress("/tuio/2Dcur");
                        m.addStringArg("set");
                        m.addIntArg(fingers[i].id());   // id
                        m.addFloatArg(tuioPos.x);       // x
                        m.addFloatArg(tuioPos.y);       // y
                        // TODO
                        m.addFloatArg(tipVel.x / 400);  // dX
                        m.addFloatArg(tipVel.y / 400);  // dY
                        m.addFloatArg(0);               // maccel
                        bundle.addMessage(m);
                        alive.addIntArg(fingers[i].id()); // add blob to list of ALL active IDs
                    }
                }
            }
#ifdef INIT_BBOX
            std::cout << "min: " << bboxMin << std::endl;
            std::cout << "max: " << bboxMax << std::endl;
#endif
        }

#if 0
        // Get the hand's sphere radius and palm position
        std::cout << "Hand sphere radius: " << hand.sphereRadius()
            << " mm, palm position: " << hand.palmPosition() << std::endl;
#endif

        // Get the hand's normal vector and direction
        const Vector normal = hand.palmNormal();
        const Vector direction = hand.direction();

#if 0
        // Calculate the hand's pitch, roll, and yaw angles
        std::cout << "Hand pitch: " << direction.pitch() * RAD_TO_DEG << " degrees, "
            << "roll: " << normal.roll() * RAD_TO_DEG << " degrees, "
            << "yaw: " << direction.yaw() * RAD_TO_DEG << " degrees" << std::endl;
#endif
    }

    bundle.addMessage(alive);
    bundle.addMessage(fseq);
    tuioSender.sendBundle(bundle);

    // Get gestures
    for (int g = 0; g < gestures.count(); ++g) {
        Gesture gesture = gestures[g];

        switch (gesture.type()) {
            case Gesture::TYPE_CIRCLE:
            {
                CircleGesture circle = gesture;
                std::string clockwiseness;
                if (circle.pointable().direction().angleTo(circle.normal()) <= PI/4) {
                    clockwiseness = "clockwise";
                } else {
                    clockwiseness = "counterclockwise";
                }

                // Calculate angle swept since last frame
                float sweptAngle = 0;
                if (circle.state() != Gesture::STATE_START) {
                    CircleGesture previousUpdate = CircleGesture(controller.frame(1).gesture(circle.id()));
                    sweptAngle = (circle.progress() - previousUpdate.progress()) * 2 * PI;
                }
                std::cout << "Circle id: " << gesture.id()
                    << ", state: " << gesture.state()
                    << ", progress: " << circle.progress()
                    << ", radius: " << circle.radius()
                    << ", angle " << sweptAngle * RAD_TO_DEG
                    << ", " << clockwiseness << std::endl;
                break;
            }
            case Gesture::TYPE_SWIPE:
            {
                SwipeGesture swipe = gesture;
                std::cout << "Swipe id: " << gesture.id()
                    << ", state: " << gesture.state()
                    << ", direction: " << swipe.direction()
                    << ", speed: " << swipe.speed() << std::endl;
                break;
            }
            case Gesture::TYPE_KEY_TAP:
            {
                KeyTapGesture tap = gesture;
                std::cout << "Key Tap id: " << gesture.id()
                    << ", state: " << gesture.state()
                    << ", position: " << tap.position()
                    << ", direction: " << tap.direction() << std::endl;
                break;
            }
            case Gesture::TYPE_SCREEN_TAP:
            {
                ScreenTapGesture screentap = gesture;
                std::cout << "Screen Tap id: " << gesture.id()
                    << ", state: " << gesture.state()
                    << ", position: " << screentap.position()
                    << ", direction: " << screentap.direction() << std::endl;
                break;
            }
            default:
                std::cout << "Unknown gesture type." << std::endl;
                break;
        }
    }

#if 0
    if (!hands.empty() || !gestures.empty()) {
        std::cout << std::endl;
    }
#endif
}