// Ensure that a certificate that WAS generated using the certificate // authority is NOT allowed to communicate when the SSL_REQUIRE_CERT // flag is enabled. TEST_F(SSLTest, RequireCertificate) { Try<Socket> server = setup_server({ {"SSL_ENABLED", "true"}, {"SSL_KEY_FILE", key_path().value}, {"SSL_CERT_FILE", certificate_path().value}, {"SSL_REQUIRE_CERT", "true"}}); ASSERT_SOME(server); Try<Subprocess> client = launch_client({ {"SSL_ENABLED", "true"}, {"SSL_KEY_FILE", key_path().value}, {"SSL_CERT_FILE", certificate_path().value}, {"SSL_REQUIRE_CERT", "true"}}, server.get(), true); ASSERT_SOME(client); Future<Socket> socket = server.get().accept(); AWAIT_ASSERT_READY(socket); // TODO(jmlvanre): Remove const copy. AWAIT_ASSERT_EQ(data, Socket(socket.get()).recv()); AWAIT_ASSERT_READY(Socket(socket.get()).send(data)); AWAIT_ASSERT_READY(await_subprocess(client.get(), 0)); }
// Ensure that a certificate that was not generated using the // certificate authority is still allowed to communicate as long as // the LIBPROCESS_SSL_VERIFY_CERT and LIBPROCESS_SSL_REQUIRE_CERT // flags are disabled. TEST_F(SSLTest, NoVerifyBadCA) { Try<Socket> server = setup_server({ {"LIBPROCESS_SSL_ENABLED", "true"}, {"LIBPROCESS_SSL_KEY_FILE", key_path().string()}, {"LIBPROCESS_SSL_CERT_FILE", certificate_path().string()}, {"LIBPROCESS_SSL_VERIFY_CERT", "false"}, {"LIBPROCESS_SSL_REQUIRE_CERT", "false"}}); ASSERT_SOME(server); Try<Subprocess> client = launch_client({ {"LIBPROCESS_SSL_ENABLED", "true"}, {"LIBPROCESS_SSL_KEY_FILE", scrap_key_path().string()}, {"LIBPROCESS_SSL_CERT_FILE", scrap_certificate_path().string()}, {"LIBPROCESS_SSL_REQUIRE_CERT", "true"}, {"LIBPROCESS_SSL_CA_FILE", certificate_path().string()}}, server.get(), true); ASSERT_SOME(client); Future<Socket> socket = server.get().accept(); AWAIT_ASSERT_READY(socket); // TODO(jmlvanre): Remove const copy. AWAIT_ASSERT_EQ(data, Socket(socket.get()).recv()); AWAIT_ASSERT_READY(Socket(socket.get()).send(data)); AWAIT_ASSERT_READY(await_subprocess(client.get(), 0)); }
TEST_F(SSLClientTest, client)
{
  // Create the socket based on the 'use_ssl' flag. This lets us check
  // whether a regular (poll) socket can connect to an SSL server
  // socket.
  const Try<Socket> socket_create =
    Socket::create(flags.use_ssl ? Socket::SSL : Socket::POLL);
  ASSERT_SOME(socket_create);

  Socket client = socket_create.get();

  Try<net::IP> server_ip = net::IP::parse(flags.server, AF_INET);
  EXPECT_SOME(server_ip);

  // Connect to the server located at `ip:port` and verify that the
  // client views the connection as established.
  const Future<Nothing> connected =
    client.connect(Address(server_ip.get(), flags.port));
  AWAIT_EXPECT_READY(connected);

  // Round-trip 'data': send it to the server and expect it echoed back.
  AWAIT_EXPECT_READY(client.send(flags.data));
  AWAIT_EXPECT_EQ(flags.data, client.recv());
}
// Start an asynchronous connect to hostname:port. Once the TCP
// connection completes, finish_connect runs the handshake on `shake`
// and finally invokes the caller's handler.
void connect(handshake& shake, network& net, const std::string& hostname,
    uint16_t port, network::connect_handler handle_connect)
{
    // Name the composed callback for readability: it forwards the
    // connect result (_1, _2) into finish_connect along with the
    // handshake (by reference) and the caller's handler.
    const auto handshake_then_notify =
        std::bind(finish_connect, _1, _2, std::ref(shake), handle_connect);
    net.connect(hostname, port, handshake_then_notify);
}
//============================================================================ double push(network net, shared_ptr<vector<sample>> dataSet) { vector<double> input; vector<double> expected; vector<double> actual; double correct = 0; int i = 0; vector<sample>::iterator currentSample = dataSet->begin(); //network dnet(data.getTableName(), data.getAttributeCount(), data.getCatagoryCount(), HIDDEN_LAYERS, LAYER_SIZE); while(currentSample != dataSet->end()){ //currentSample->printSample(); input = currentSample->getValues(); expected = m_typeToVector[currentSample->getClassification()]; actual = net.push(input); if (m_data.classVector(m_data.getCatagoryCount(), catagorize(actual)) == expected) correct++; if (i % 10 == 0 && !m_quiet) { cout << "Actual = "; printVector(actual); cout << " "; cout << "Expected = "; printVector(expected); if (m_data.classVector(m_data.getCatagoryCount(), catagorize(actual)) != expected){ cout << " x"; } cout << endl; } i++; currentSample++; } double total = m_data.getSampleCount(); double percent = (correct / total) * 100; cout << correct << "/" << total << " = " << percent << "% correct." << endl; return percent; }
// Tear down all detectors, sockets and the OpenNI context, then exit
// the process.
// NOTE(review): always exits with status 1, even on the normal quit
// path — confirm whether a success code was intended for clean exits.
void CleanupExit()
{
    if (NULL != _sessionManager)
    {
        delete _sessionManager;
        _sessionManager = NULL;
    }

    // Drop the AS3 bridge connection and clear the flag that keeps the
    // server thread loop running.
    g_AS3Network.closeConnection();
    g_Connected = 0;

    removeListeners();

    // Detector/broadcaster pointers are deleted unconditionally;
    // presumably the ones for disabled features were never allocated and
    // are still null (zero-initialized file-scope globals), making
    // `delete` a no-op — confirm they are not locals.
    delete _broadcaster;
    delete _waveDetector;
    delete _pushDetector;
    delete _swipeDetector;
    delete _steadyDetector;
    delete _circleDetector;
    delete _leftRightSlider;
    delete _upDownSlider;
    delete _inOutSlider;
    delete _trackPad;

    _context.Shutdown();

    // Close only the sockets opened by enabled features; the session
    // socket is always open.
    if(_featureSinglePoint) close(POINT_SOCKET);
    if(_featureSlider) close(SLIDER_SOCKET);
    if(_featureUserTracking) close(USER_TRACKING_SOCKET);
    close(SESSION_SOCKET);

    exit(1);
}
// Spawn the thread that services commands from the AS3 client and send
// the initial handshake message. Marks the bridge as connected first so
// the thread's loop condition (g_Connected) is already set when it runs.
void setupServer()
{
    g_Connected = 1;
    if (pthread_create(&g_ServerThread, NULL, &serverData, NULL))
    {
        fprintf(stderr, "AS3OpenNI :: Error on pthread_create() for the server\n");
        // BUGFIX: previously we left g_Connected set and still sent the
        // handshake even though no server thread exists. Clear the flag
        // and bail out instead.
        g_Connected = 0;
        return;
    }
    g_AS3Network.sendMessage(0,0,0);
}
// Test a basic back-and-forth communication within the same OS // process. TEST_P(SSLTest, BasicSameProcess) { os::setenv("LIBPROCESS_SSL_ENABLED", "true"); os::setenv("LIBPROCESS_SSL_KEY_FILE", key_path().string()); os::setenv("LIBPROCESS_SSL_CERT_FILE", certificate_path().string()); os::setenv("LIBPROCESS_SSL_REQUIRE_CERT", "true"); os::setenv("LIBPROCESS_SSL_CA_DIR", os::getcwd()); os::setenv("LIBPROCESS_SSL_CA_FILE", certificate_path().string()); os::setenv("LIBPROCESS_SSL_VERIFY_IPADD", GetParam()); openssl::reinitialize(); const Try<Socket> server_create = Socket::create(Socket::SSL); ASSERT_SOME(server_create); const Try<Socket> client_create = Socket::create(Socket::SSL); ASSERT_SOME(client_create); Socket server = server_create.get(); Socket client = client_create.get(); // We need to explicitly bind to INADDR_LOOPBACK so the certificate // we create in this test fixture can be verified. ASSERT_SOME(server.bind(Address(net::IP(INADDR_LOOPBACK), 0))); const Try<Nothing> listen = server.listen(BACKLOG); ASSERT_SOME(listen); const Try<Address> server_address = server.address(); ASSERT_SOME(server_address); const Future<Socket> _socket = server.accept(); const Future<Nothing> connect = client.connect(server_address.get()); // Wait for the server to have accepted the client connection. AWAIT_ASSERT_READY(_socket); Socket socket = _socket.get(); // TODO(jmlvanre): Remove const copy. // Verify that the client also views the connection as established. AWAIT_ASSERT_READY(connect); // Send a message from the client to the server. const string data = "Hello World!"; AWAIT_ASSERT_READY(client.send(data)); // Verify the server received the message. AWAIT_ASSERT_EQ(data, socket.recv()); // Send the message back from the server to the client. AWAIT_ASSERT_READY(socket.send(data)); // Verify the client received the message. AWAIT_ASSERT_EQ(data, client.recv()); }
void XN_CALLBACK_TYPE UserCalibration_CalibrationEnd(SkeletonCapability& capability, XnUserID nId, XnBool bSuccess, void* pCookie) { if (bSuccess) { if(_printUserTracking) printf("AS3OpenNI :: Calibration complete, start tracking user: %d\n", nId); _userGenerator.GetSkeletonCap().StartTracking(nId); char cValue[50]; sprintf(cValue, "user_tracking_user_calibration_complete:%d", nId); if(_useSockets) { #if (XN_PLATFORM == XN_PLATFORM_WIN32) g_AS3Network.sendMessage(1,8,nId); #else sendToSocket(USER_TRACKING_SOCKET, cValue); #endif } } else { if(_printUserTracking) printf("AS3OpenNI :: Calibration failed for user: %d\n", nId); if (_needPose) { _userGenerator.GetPoseDetectionCap().StartPoseDetection(_strPose, nId); } else { _userGenerator.GetSkeletonCap().RequestCalibration(nId, true); } char cValue[50]; sprintf(cValue, "user_tracking_user_calibration_failed:%d", nId); if(_useSockets) { #if (XN_PLATFORM == XN_PLATFORM_WIN32) g_AS3Network.sendMessage(1,9,nId); #else sendToSocket(USER_TRACKING_SOCKET, cValue); #endif } } }
// One of a pair of ping-sweep workers over `net`'s address range. The
// `increment` worker walks upward from the network address while its
// partner walks downward from the broadcast address; each stops when it
// reaches the far boundary or hits an address its partner has already
// recorded in the shared `pings` map (i.e. they met in the middle).
// Successful pings are recorded as addr -> direction-flag.
void scan_Worker(network net, bool increment = false)
{
    // BUGFIX: reads of the shared `pings` map must be synchronized with
    // the partner thread's inserts — the previous unlocked find() raced
    // with the locked writes below.
    auto already_seen = [](const addr& a)
    {
        ping_mutex.lock();
        const bool found = pings.find(a) != pings.end();
        ping_mutex.unlock();
        return found;
    };

    int count = 1;

    if(increment) // work upwards from the network address
    {
        addr next = net.increment_naddr(count);
        while(next != net.get_baddr() && !already_seen(next))
        {
            if(sys_ping(next))
            {
                ping_mutex.lock();
                pings[next] = increment;
                ping_mutex.unlock();
            }
            count++;
            next = net.increment_naddr(count);
        }
        return;
    }

    // Work downwards from the broadcast address; stop before the network
    // address or where the upward worker has already been.
    addr prev = net.decrement_baddr(count);
    while(prev != net.get_naddr() && !already_seen(prev))
    {
        if(sys_ping(prev))
        {
            ping_mutex.lock();
            pings[prev] = increment;
            ping_mutex.unlock();
        }
        count++;
        prev = net.decrement_baddr(count);
    }
}
//============================================================================ void train(network net, shared_ptr<vector<sample>> dataSet) { vector<double> input; vector<double> expected; vector<double> actual; vector<sample>::iterator currentSample = dataSet->begin(); #if WINDOWSSYSTEM FILETIME tStart, tEnd; GetSystemTimeAsFileTime(&tStart); #else struct timeval profileStart, profileEnd; gettimeofday(&profileStart, NULL); #endif int p = 0, oldp = -1; int iterations = m_data.getSampleCount() * ITERATIONS; outputStatusMessage("Training the network"); for (unsigned int g = 0; g < ITERATIONS; g++) { p = calcPercentageComplete(g, ITERATIONS); if (oldp < p) { oldp = updateOutputPercentage(p, false); } currentSample = dataSet->begin(); while(currentSample != dataSet->end()){ input = currentSample->getValues(); expected = m_typeToVector[currentSample->getClassification()]; actual = net.push(input); net.train(input, expected); currentSample++; } } updateOutputPercentage(p, true); #if WINDOWSSYSTEM GetSystemTimeAsFileTime(&tEnd); PrintTimeDifference(tStart, tEnd); #else gettimeofday(&profileEnd, NULL); PrintTimeDifference(profileStart, profileEnd); #endif }
// Print the network's current output vector to stdout in the form
// "[v0 v1 ... vn]" (space-separated, no trailing space).
void print_output(network & net){
    const std::vector<float> values = net.output();
    const int count = static_cast<int>(values.size());

    std::cout << "[";
    for(int idx = 0; idx < count; ++idx)
    {
        if(idx > 0)
            std::cout << " ";
        std::cout << values[idx];
    }
    std::cout << "]" << std::endl;
}
// Callback fired when skeleton calibration begins for a user; notifies
// the AS3 client (binary message on Win32, socket string otherwise).
void XN_CALLBACK_TYPE UserCalibration_CalibrationStart(SkeletonCapability& capability, XnUserID nId, void* pCookie)
{
    if(_printUserTracking) printf("AS3OpenNI :: Calibration started for user: %d\n", nId);

    // Hardened: snprintf bounds the write to the buffer (sprintf had no
    // size check against large user ids).
    char cValue[50];
    snprintf(cValue, sizeof(cValue), "user_tracking_user_calibration_start:%d", nId);
    if(_useSockets)
    {
        #if (XN_PLATFORM == XN_PLATFORM_WIN32)
            g_AS3Network.sendMessage(1,7,nId);
        #else
            sendToSocket(USER_TRACKING_SOCKET, cValue);
        #endif
    }
}
// Callback fired when the user generator loses track of a user;
// notifies the AS3 client (binary message on Win32, socket string
// otherwise).
void XN_CALLBACK_TYPE User_LostUser(UserGenerator& generator, XnUserID nId, void* pCookie)
{
    if(_printUserTracking) printf("AS3OpenNI :: Lost user: %d\n", nId);

    // Hardened: snprintf bounds the write to the buffer (sprintf had no
    // size check against large user ids).
    char cValue[50];
    snprintf(cValue, sizeof(cValue), "user_tracking_lost_user:%d", nId);
    if(_useSockets)
    {
        #if (XN_PLATFORM == XN_PLATFORM_WIN32)
            g_AS3Network.sendMessage(1,3,nId);
        #else
            sendToSocket(USER_TRACKING_SOCKET, cValue);
        #endif
    }
}
// Callback fired when the calibration pose is detected for a user:
// stop pose detection, request skeleton calibration, and notify the
// AS3 client.
void XN_CALLBACK_TYPE UserPose_PoseDetected(PoseDetectionCapability& capability, const XnChar* strPose, XnUserID nId, void* pCookie)
{
    if(_printUserTracking) printf("AS3OpenNI :: Pose %s detected for user: %d\n", strPose, nId);
    _userGenerator.GetPoseDetectionCap().StopPoseDetection(nId);
    _userGenerator.GetSkeletonCap().RequestCalibration(nId, true);

    // Hardened: snprintf bounds the write to the buffer (sprintf had no
    // size check against large user ids).
    char cValue[50];
    snprintf(cValue, sizeof(cValue), "user_tracking_pose_detected:%d", nId);
    if(_useSockets)
    {
        #if (XN_PLATFORM == XN_PLATFORM_WIN32)
            g_AS3Network.sendMessage(1,6,nId);
        #else
            sendToSocket(USER_TRACKING_SOCKET, cValue);
        #endif
    }
}
// Interactive driver: 's' starts the server listen thread, 'q' quits.
// Prints the elapsed wall-clock time on exit.
int main(int argc, char** argv)
{
    char input = 0;
    timer ter;
    cout << "Hello! This is a GameFramework application." << endl;
    cout << "Press q to quit, press s to Start Server." << endl;
    while(1)
    {
        // BUGFIX: stop on stream failure/EOF — previously a closed stdin
        // left this loop spinning forever.
        if(!(cin >> input))
        {
            break;
        }
        if(input == 's')
        {
            global_network.listen_thread();
        }
        else if(input == 'q')
        {
            break;
        }
        else
        {
            // operator>> skips whitespace, so the old `input != '\n'`
            // guard could never be false; every other char is reported.
            cout << "Unknown command '" << input << "'! Ignoring...\n";
        }
    }
    cout << ter.elapsed()<<"s passed."<<endl;
    return 0;
}
// Callback fired when the user generator detects a new user: kick off
// the calibration flow (pose detection first if a pose is required) and
// notify the AS3 client.
void XN_CALLBACK_TYPE User_NewUser(UserGenerator& generator, XnUserID nId, void* pCookie)
{
    if(_printUserTracking) printf("AS3OpenNI :: New User: %d\n", nId);
    if(_needPose)
    {
        _userGenerator.GetPoseDetectionCap().StartPoseDetection(_strPose, nId);
    }
    else
    {
        _userGenerator.GetSkeletonCap().RequestCalibration(nId, true);
    }

    // Hardened: snprintf bounds the write to the buffer (sprintf had no
    // size check against large user ids).
    char cValue[50];
    snprintf(cValue, sizeof(cValue), "user_tracking_new_user:%d", nId);
    if(_useSockets)
    {
        #if (XN_PLATFORM == XN_PLATFORM_WIN32)
            g_AS3Network.sendMessage(1,2,nId);
        #else
            sendToSocket(USER_TRACKING_SOCKET, cValue);
        #endif
    }
}
// Pretty-print: serialize `x` into a string via its write() stream
// method.
std::string pp(const network& x)
{
    std::stringstream buffer;
    x.write(buffer);
    return buffer.str();
}
int main(int argc, char *argv[]) { //--------------------------------------------------------------------// //------------------------- SETUP REQUIRED NODES ---------------------// //--------------------------------------------------------------------// // Setup the command line parameters. setupParams(argc, argv); // Setup all the sockets. setupSockets(); // Setup the capture socket server for Mac. #if (XN_PLATFORM == XN_PLATFORM_MACOSX) if(_featureDepthMapCapture || _featureRGBCapture) { if(_useSockets) { g_AS3Network = network(); g_AS3Network.init(setupServer); } } #endif // Setup the status. XnStatus _status = XN_STATUS_OK; EnumerationErrors _errors; // Context Init and Add license. _status = _context.Init(); CHECK_RC(_status, "AS3OpenNI :: Initialize context"); _context.SetGlobalMirror(_mirror); XnChar vendor[XN_MAX_NAME_LENGTH]; XnChar license[XN_MAX_LICENSE_LENGTH]; _license.strVendor[XN_MAX_NAME_LENGTH] = strcmp(vendor, "PrimeSense"); _license.strKey[XN_MAX_LICENSE_LENGTH] = strcmp(license, "0KOIk2JeIBYClPWVnMoRKn5cdY4="); _status = _context.AddLicense(_license); CHECK_RC(_status, "AS3OpenNI :: Added license"); // Set it to VGA maps at 30 FPS _depthMode.nXRes = 640; _depthMode.nYRes = 480; _depthMode.nFPS = 30; // Depth map create. _status = _depth.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Create depth generator"); _status = _depth.SetMapOutputMode(_depthMode); // Depth map create. _status = _image.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Create image generator"); _status = _image.SetMapOutputMode(_depthMode); _status = _image.SetPixelFormat(XN_PIXEL_FORMAT_RGB24); // Create the hands generator. _status = _hands.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Create hands generator"); _hands.SetSmoothing(0.1); // Create the gesture generator. _status = _gesture.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Create gesture generator"); // Create user generator. 
_status = _userGenerator.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Find user generator"); // Create and initialize point tracker _sessionManager = new XnVSessionManager(); _status = _sessionManager->Initialize(&_context, "Wave", "RaiseHand"); if (_status != XN_STATUS_OK) { printf("AS3OpenNI :: Couldn't initialize the Session Manager: %s\n", xnGetStatusString(_status)); CleanupExit(); } _sessionManager->RegisterSession(NULL, &SessionStart, &SessionEnd, &SessionProgress); // Start catching signals for quit indications CatchSignals(&_quit); //---------------------------------------------------------------// //------------------------- SETUP FEATURES ---------------------// //--------------------------------------------------------------// // Define the Wave and SinglePoint detectors. _waveDetector = new XnVWaveDetector(); // SinglePoint detector. if(_featureSinglePoint) _waveDetector->RegisterPointUpdate(NULL, &OnPointUpdate); // Feature Gesture. if(_featureGesture) { // Wave detector. _waveDetector->RegisterWave(NULL, &OnWave); // Push detector. _pushDetector = new XnVPushDetector(); _pushDetector->RegisterPush(NULL, &onPush); // Swipe detector. _swipeDetector = new XnVSwipeDetector(); _swipeDetector->RegisterSwipeUp(NULL, &Swipe_SwipeUp); _swipeDetector->RegisterSwipeDown(NULL, &Swipe_SwipeDown); _swipeDetector->RegisterSwipeLeft(NULL, &Swipe_SwipeLeft); _swipeDetector->RegisterSwipeRight(NULL, &Swipe_SwipeRight); // Steady detector. _steadyDetector = new XnVSteadyDetector(); _steadyDetector->RegisterSteady(NULL, &Steady_OnSteady); } // Feature Circle. if(_featureCircle) { // Circle detector. _circleDetector = new XnVCircleDetector(); _circleDetector->RegisterCircle(NULL, &CircleCB); _circleDetector->RegisterNoCircle(NULL, &NoCircleCB); _circleDetector->RegisterPrimaryPointCreate(NULL, &Circle_PrimaryCreate); _circleDetector->RegisterPrimaryPointDestroy(NULL, &Circle_PrimaryDestroy); } // Feature Slider. if(_featureSlider) { // Left/Right slider. 
_leftRightSlider = new XnVSelectableSlider1D(3, 0, AXIS_X); _leftRightSlider->RegisterActivate(NULL, &LeftRightSlider_OnActivate); _leftRightSlider->RegisterDeactivate(NULL, &LeftRightSlider_OnDeactivate); _leftRightSlider->RegisterPrimaryPointCreate(NULL, &LeftRightSlider_OnPrimaryCreate); _leftRightSlider->RegisterPrimaryPointDestroy(NULL, &LeftRightSlider_OnPrimaryDestroy); _leftRightSlider->RegisterValueChange(NULL, &LeftRightSlider_OnValueChange); _leftRightSlider->SetValueChangeOnOffAxis(false); // Up/Down slider. _upDownSlider = new XnVSelectableSlider1D(3, 0, AXIS_Y); _upDownSlider->RegisterActivate(NULL, &UpDownSlider_OnActivate); _upDownSlider->RegisterDeactivate(NULL, &UpDownSlider_OnDeactivate); _upDownSlider->RegisterPrimaryPointCreate(NULL, &UpDownSlider_OnPrimaryCreate); _upDownSlider->RegisterPrimaryPointDestroy(NULL, &UpDownSlider_OnPrimaryDestroy); _upDownSlider->RegisterValueChange(NULL, &UpDownSlider_OnValueChange); _upDownSlider->SetValueChangeOnOffAxis(false); // In/Out slider. _inOutSlider = new XnVSelectableSlider1D(3, 0, AXIS_Z); _inOutSlider->RegisterActivate(NULL, &InOutSlider_OnActivate); _inOutSlider->RegisterDeactivate(NULL, &InOutSlider_OnDeactivate); _inOutSlider->RegisterPrimaryPointCreate(NULL, &InOutSlider_OnPrimaryCreate); _inOutSlider->RegisterPrimaryPointDestroy(NULL, &InOutSlider_OnPrimaryDestroy); _inOutSlider->RegisterValueChange(NULL, &InOutSlider_OnValueChange); _inOutSlider->SetValueChangeOnOffAxis(false); } // Feature TrackPad. if(_featureTrackPad) { // Track Pad. if(trackpad_columns > 0 && trackpad_rows > 0) { _trackPad = new XnVSelectableSlider2D(trackpad_columns, trackpad_rows); } else { _trackPad = new XnVSelectableSlider2D(4, 9); } _trackPad->RegisterItemHover(NULL, &TrackPad_ItemHover); _trackPad->RegisterItemSelect(NULL, &TrackPad_ItemSelect); _trackPad->RegisterPrimaryPointCreate(NULL, &TrackPad_PrimaryCreate); _trackPad->RegisterPrimaryPointDestroy(NULL, &TrackPad_PrimaryDestroy); } // Feature User Tracking. 
if(_featureUserTracking) { // Setup user generator callbacks. XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks; if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) { printf("AS3OpenNI :: Supplied user generator doesn't support skeleton\n"); return 1; } _userGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks); // Setup Skeleton detection. _userGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks); if (_userGenerator.GetSkeletonCap().NeedPoseForCalibration()) { _needPose = true; if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) { printf("AS3OpenNI :: Pose required, but not supported\n"); return 1; } _userGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks); _userGenerator.GetSkeletonCap().GetCalibrationPose(_strPose); } _userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL); } // Create the broadcaster manager. _broadcaster = new XnVBroadcaster(); // Start generating all. _context.StartGeneratingAll(); // Set the frame rate. _status = xnFPSInit(&xnFPS, 180); CHECK_RC(_status, "AS3OpenNI :: FPS Init"); //----------------------------------------------------------------------// //------------------------- SETUP DISPLAY SUPPORT ---------------------// //--------------------------------------------------------------------// // Setup depth and image data. _depth.GetMetaData(_depthData); _image.GetMetaData(_imageData); // Hybrid mode isn't supported in this sample if (_imageData.FullXRes() != _depthData.FullXRes() || _imageData.FullYRes() != _depthData.FullYRes()) { printf ("AS3OpenNI :: The device depth and image resolution must be equal!\n"); return 1; } // RGB is the only image format supported. 
if (_imageData.PixelFormat() != XN_PIXEL_FORMAT_RGB24) { printf("AS3OpenNI :: The device image format must be RGB24\n"); return 1; } // Setup the view points to match between the depth and image maps. if(_snapPixels) _depth.GetAlternativeViewPointCap().SetViewPoint(_image); //-------------------------------------------------------------// //------------------------- MAIN LOOP ------------------------// //-----------------------------------------------------------// // Setup the capture socket server for PC. #if (XN_PLATFORM == XN_PLATFORM_WIN32) if(_featureDepthMapCapture || _featureRGBCapture || _featureUserTracking) { if(_useSockets) { g_AS3Network = network(); g_AS3Network.init(setupServer); } } #endif // Main loop while ((!_kbhit()) && (!_quit)) { xnFPSMarkFrame(&xnFPS); _context.WaitAndUpdateAll(); _sessionManager->Update(&_context); if(_featureDepthMapCapture) captureDepthMap(g_ucDepthBuffer); if(_featureRGBCapture) captureRGB(g_ucImageBuffer); #if (XN_PLATFORM == XN_PLATFORM_WIN32) if(_featureUserTracking) getPlayers(); #else if(_featureUserTracking) renderSkeleton(); #endif } CleanupExit(); }
void *serverData(void *arg) { int len = 8*10; unsigned char *buff = (unsigned char*)malloc(len); // Command buffer while(g_Connected) { len = g_AS3Network.getData(buff, 1024); if(len > 0 && len % 6 == 0) { // Get the number of commands received. int max = len / 6; int i; // For each command received. for(i = 0; i < max; i++) { int code = buff[0 + (i*6)]; switch(code) { case 1: // OPENNI switch(buff[1 + (i*6)]) { case 0: // GET DEPTH if(_featureDepthMapCapture) g_AS3Network.sendMessage(1,0,g_ucDepthBuffer,sizeof(g_ucDepthBuffer)); break; case 1: // GET RGB if(_featureRGBCapture) g_AS3Network.sendMessage(1,1,g_ucImageBuffer,sizeof(g_ucImageBuffer)); break; #if (XN_PLATFORM == XN_PLATFORM_WIN32) case 4: // GET USERS if(_featureUserTracking) { for (int i = 0; i < MAX_USERS; ++i) { g_AS3Network.sendMessage(1,4,g_ucPlayersBuffer[i].data,g_ucPlayersBuffer[i].size); } } break; case 5: // GET SKELETONS if(_featureUserTracking) { for (int i = 0; i < MAX_USERS; ++i) { g_AS3Network.sendMessage(1,5,g_ucSkeletonsBuffer[i].data,g_ucSkeletonsBuffer[i].size); } } break; #endif } break; } } } } return 0; }
// Encode the board state for `turn` into the network's 198-element
// input representation and return the network's evaluation of it.
double easy_eval(const tavla& t, const network& net, int turn)
{
    double features[198];
    t.to_vector(features, turn);
    return net.evaluate(features);
}