LRESULT FavoriteHubsFrame::onSpeaker(UINT /*uMsg*/, WPARAM wParam, LPARAM lParam, BOOL& /*bHandled*/) {
    // Handles hub connect/disconnect notifications posted from another thread.
    // wParam selects the event; lParam carries a heap-allocated hub address
    // whose ownership is transferred to this handler.
    if(wParam == HUB_CONNECTED || wParam == HUB_DISCONNECTED) {
        // Take ownership immediately so every exit path frees the string.
        // (std::auto_ptr is deprecated and removed in C++17; unique_ptr is the
        // drop-in owner here — <memory> is already required by the old code.)
        std::unique_ptr<string> hub(reinterpret_cast<string*>(lParam));

        int image;
        if(wParam == HUB_CONNECTED) {
            onlineHubs.push_back(*hub);
            image = 0;  // icon index 0 = online
        } else {
            // Remove every occurrence of this hub from the online list.
            onlineHubs.erase(remove(onlineHubs.begin(), onlineHubs.end(), *hub), onlineHubs.end());
            image = 1;  // icon index 1 = offline
        }

        // Find the matching list-view row and update its state icon.
        // (Previously this loop was duplicated verbatim in both branches.)
        for(int i = 0; i < ctrlHubs.GetItemCount(); ++i) {
            FavoriteHubEntry* e = (FavoriteHubEntry*)ctrlHubs.GetItemData(i);
            if(e->getServer() == *hub) {
                ctrlHubs.SetItem(i, 0, LVIF_IMAGE, NULL, image, 0, 0, NULL);
                ctrlHubs.Update(i);
                break;
            }
        }
    }
    return 0;
}
void Coroutine::notify() { // Unblock the coroutine when an event occurs. switch (status_) { case Coroutine::WAITING: status_ = Coroutine::RUNNABLE; break; case Coroutine::RUNNABLE: return; case Coroutine::RUNNING: // fallthrough case Coroutine::EXITED: // fallthrough case Coroutine::DELETED: // fallthrough case Coroutine::NEW: // fallthrough default: assert(!"illegal state"); break; } hub()->waiting_--; hub()->runnable_.push_back(shared_from_this()); }
int main(int argc, char ** argv) {
    char * service = NULL;
    char * env;

    /* ENV_SERVICE, when set and non-empty, overrides the command line. */
    env = getenv(ENV_SERVICE);
    if ((env != NULL) && (strlen(env) != 0)) {
        service = malloc(strlen(env) + 1);
        if (service == NULL) {
            perror("malloc");
            return 1;
        }
        strcpy(service, env);
    }

    if (service == NULL) {
        /* No env override: the port must be the single CLI argument. */
        if (argc != 2) {
            help(argv[0]);
            return 1;
        }
        service = argv[1];
    } else if (argc != 1) {
        /* Env override present, but stray CLI arguments were also given.
         * Free the copy we malloc'd above (was leaked here before). */
        free(service);
        help(argv[0]);
        return 1;
    }

    printf("tcphub running on port %s\n", service);
    fflush(stdout);

    if (hub(service) < 0) {
        return 1;
    }
    /* Explicit success return (previously fell off the end of main). */
    return 0;
}
int main(int argc, char** argv) { try { myo::Hub hub("com.example.hello-myo"); // not planning to change this yet std::cout << "Attempting to find a Myo..." << std::endl; myo::Myo* myo = hub.waitForMyo(10000); if (!myo) { throw std::runtime_error("Unable to find a Myo!"); } std::cout << "Connected to a Myo armsband!" << std::endl << std::endl; MyoAVGCoreController collector; // init the AVGController hub.addListener(&collector); while (1) { hub.run(1000 / 10); collector.print(); collector.eco_cycle(); } } catch (const std::exception& e) { std::cerr << "Error: " << e.what() << std::endl; std::cerr << "Press enter to continue."; std::cin.ignore(); return 1; } }
int main() { try { // An actual myo::Hub; NOT a MyoSim::Hub. myo::Hub hub("com.voidingwarranties.myo-simulator-example"); myo::Myo* myo = hub.waitForMyo(10000); if (!myo) { throw std::runtime_error("Unable to find a Myo!"); } // Record only pose events. MyoSim::EventRecorder recorder(MyoSim::EventRecorder::POSE); hub.addListener(&recorder); // Record for 5 seconds. myo->unlock(myo::Myo::unlockHold); hub.run(5000); myo->lock(); std::cout << "Events recorded. Press ENTER to replay events."; getchar(); MyoSim::Hub simulated_hub; PrintListener print_listener; simulated_hub.addListener(&print_listener); MyoSim::EventPlayer player(simulated_hub); player.play(recorder.getEventSession()); } catch (const std::exception& ex) { std::cerr << "Error: " << ex.what() << std::endl; return 1; } return 0; }
void fdf(t_p *f)
{
    // Set up the mlx session and window, draw the axes and the model, then
    // hand control to the mlx event loop (never returns).
    // Removed locals `i` and `c`: both were initialized to 0 and never read.
    f->mlx = mlx_init();
    f->win = mlx_new_window(f->mlx, f->h, f->l, "fdf");
    drawx(f, 0x00FF00);
    drawy(f, 0x00FF00);
    hub(f);
    mlx_key_hook(f->win, key_funct, 0);
    mlx_loop(f->mlx);
}
void Coroutine::block() { // Block the current coroutine until some I/O event occurs. The coroutine will // not be rescheduled until explicitly scheduled. // Anchor the coroutine, so that it doesn't get GC'ed while blocked on I/O. Ptr<Coroutine> anchor = shared_from_this(); assert(coroCurrent == this); switch (status_) { case Coroutine::RUNNING: status_ = Coroutine::BLOCKED; break; case Coroutine::EXITED: break; case Coroutine::DELETED: break; // fallthrough case Coroutine::RUNNABLE: // fallthrough case Coroutine::BLOCKED: // fallthrough case Coroutine::NEW: // fallthrough default: assert(!"illegal state"); break; } hub()->blocked_++; main()->swap(); }
int main(int argc, char** argv) { try { myo::Hub hub("com.example.multiple-myos"); // Instantiate the PrintMyoEvents class we defined above, and attach it as a listener to our Hub. PrintMyoEvents printer; hub.addListener(&printer); while (1) { // Process events for 10 milliseconds at a time. hub.run(10); } } catch (const std::exception& e) { std::cerr << "Error: " << e.what() << std::endl; std::cerr << "Press enter to continue."; std::cin.ignore(); return 1; } }
void Coroutine::wait() {
    // Block the current coroutine until some event occurs. The coroutine will
    // not be rescheduled until explicitly scheduled (see notify(), which moves
    // a WAITING coroutine back onto the runnable queue).
    // Anchor the coroutine, so that it doesn't get GC'ed while waiting.
    // FixMe: Should this be the case? Maybe a waiting coroutine should be
    // collected.
    Ptr<Coroutine> anchor = shared_from_this();
    assert(coroCurrent == this);
    switch (status_) {
    case Coroutine::RUNNING:
        status_ = Coroutine::WAITING;
        break;
    case Coroutine::EXITED:
        break;
    case Coroutine::DELETED:
        break;
    // All remaining states cannot legally call wait().
    case Coroutine::RUNNABLE: // fallthrough
    case Coroutine::BLOCKED:  // fallthrough
    case Coroutine::NEW:      // fallthrough
    default:
        assert(!"illegal state");
        break;
    }
    hub()->waiting_++;  // notify() decrements this counter when it wakes us
    main()->swap();     // switch to the scheduler ("main") coroutine
}
void MyoDevice::runDeviceLoop() {
    // Main device thread: wires up the filter pipelines, connects to a Myo,
    // then pumps events until a stop is requested or an error occurs.
    WearableDevice::setDeviceStatus(deviceStatus::RUNNING);

    // Pose pipeline: gesture recognition followed by the shared data sink.
    GestureFilter gestureFilter(state, 0, mainGui);
    posePipeline.registerFilter(&gestureFilter);
    posePipeline.registerFilter(WearableDevice::sharedData);

    // Orientation pipeline: smooth over 5 samples, translate, then sink.
    AveragingFilter averagingFilter(5);
    MyoTranslationFilter translationFilter(state);
    orientationPipeline.registerFilter(&averagingFilter);
    orientationPipeline.registerFilter(&translationFilter);
    orientationPipeline.registerFilter(WearableDevice::sharedData);

    mainGui->connectSignallerToProfileWidgets(&profileSignaller);

    // RSSI pipeline: smooth over 5 samples, then sink.
    AveragingFilter rssiAveragingFilter(5);
    rssiPipeline.registerFilter(&rssiAveragingFilter);
    rssiPipeline.registerFilter(WearableDevice::sharedData);

    connectPipeline.registerFilter(WearableDevice::sharedData);

    std::chrono::milliseconds rssi_start =
        std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::steady_clock::now().time_since_epoch()); /* Used to control when to request rssi */
    std::chrono::milliseconds rssi_finish;

    try {
        Hub hub(appIdentifier);
        // Disable Myo's built-in lock gestures; this app manages state itself.
        hub.setLockingPolicy(hub.lockingPolicyNone);
        Myo* myo = hub.waitForMyo(myoFindTimeout);
        if (!myo) {
            std::cout << "Could not find a Myo." << std::endl;
            WearableDevice::setDeviceStatus(deviceStatus::ERR);
            return;
        }
        MyoCallbacks myoCallbacks(*this);
        hub.addListener(&myoCallbacks);

        while (true) {
            if (WearableDevice::stopDeviceRequested()) {
                break;
            }

            // Forward any extra gesture data into the shared-data sink.
            filterDataMap extraData = gestureFilter.getExtraDataForSCD();
            if (extraData.size() > 0) {
                WearableDevice::sharedData->setInput(extraData);
                WearableDevice::sharedData->process();
            }

            // Reload profiles when the selected profile name changes.
            if (profileSignaller.getProfileName() != prevProfileName) {
                prevProfileName = profileSignaller.getProfileName();
                updateProfiles();
            }

            // Request the signal strength at most once per MIN_RSSI_DELAY ms.
            rssi_finish = std::chrono::duration_cast<std::chrono::milliseconds>(
                std::chrono::steady_clock::now().time_since_epoch());
            if ((rssi_finish - rssi_start).count() > MIN_RSSI_DELAY) {
                myo->requestRssi();
                rssi_start = std::chrono::duration_cast<std::chrono::milliseconds>(
                    std::chrono::steady_clock::now().time_since_epoch());
            }

            // Pump Myo events for one polling interval.
            hub.run(durationInMilliseconds);
        }
    } catch (const std::exception& e) {
        std::cout << "Exception: " << e.what() << std::endl;
        WearableDevice::setDeviceStatus(deviceStatus::ERR);
        return;
    }

    WearableDevice::setDeviceStatus(deviceStatus::DONE);
}
int main(int argc, char *argv[]) {
    // Video client: decodes an FFmpeg stream into an SDL overlay while a Myo
    // armband drives camera commands (sent over videoSocket) and software zoom.
    // (Korean comments below translated to English.)

    // Initalizing these to NULL prevents segfaults!
    AVFormatContext *pFormatCtx = NULL;
    int i, videoStream;
    AVCodecContext *pCodecCtxOrig = NULL;
    AVCodecContext *pCodecCtx = NULL; // codec controller(?) — used frequently
    AVCodec *pCodec = NULL;           // codec used to decode the video
    AVFrame *pFrame = NULL;           // think of this as the frame (image) data
    AVPacket packet;
    int frameFinished;
    struct SwsContext *sws_ctx = NULL; // Convert the image into YUV format that SDL uses

    // SDL-related variables
    SDL_Overlay *bmp;
    SDL_Surface *screen;
    SDL_Rect rect;
    SDL_Event event;
    CVideoSocket videoSocket;

    // Variables used for zoom in / zoom out
    int rect_w = 0;
    int rect_h = 0;

    // We catch any exceptions that might occur below -- see the catch statement for more details.
    try {
        // ---- Myo initialization starts here ----
        // First, we create a Hub with our application identifier. Be sure not to use the com.example namespace when
        // publishing your application. The Hub provides access to one or more Myos.
        myo::Hub hub("com.example.hello-myo");

        // Searching for a Myo...
        std::cout << "Attempting to find a Myo..." << std::endl;

        // Next, we attempt to find a Myo to use. If a Myo is already paired in Myo Connect, this will return that Myo
        // immediately.
        // waitForMyo() takes a timeout value in milliseconds. In this case we will try to find a Myo for 10 seconds, and
        // if that fails, the function will return a null pointer.
        myo::Myo* myo = hub.waitForMyo(10000);

        // If waitForMyo() returned a null pointer, we failed to find a Myo, so exit with an error message.
        if (!myo) {
            throw std::runtime_error("Unable to find a Myo!");
        }

        // We've found a Myo.
        std::cout << "Connected to a Myo armband!" << std::endl << std::endl;

        // Next we construct an instance of our DeviceListener, so that we can register it with the Hub.
        // Class that processes the data obtained from the Myo.
        DataCollector collector;

        // Hub::addListener() takes the address of any object whose class inherits from DeviceListener, and will cause
        // Hub::run() to send events to all registered device listeners.
        hub.addListener(&collector);
        // ---- Myo initialization ends here ----

        // Initialize SDL.
        InitSDL();

        // Open video file — opens a file or a network data stream.
        if (avformat_open_input(&pFormatCtx, videoSocket.videoStreamUrl, NULL, NULL) != 0) {
            return -1; // Couldn't open file
        }

        // Retrieve stream information about the data stream.
        if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
            return -1; // Couldn't find stream information
        }

        // Dump information about file onto standard error
        av_dump_format(pFormatCtx, 0, videoSocket.videoStreamUrl, 0);

        // Find the first video stream — determine which kind of stream this is
        // (ours is fixed to h.264, but detect anyway).
        videoStream = -1;
        for (i = 0; (unsigned)i < pFormatCtx->nb_streams; i++) {
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
                videoStream = i;
                break;
            }
        }
        if (videoStream == -1) {
            return -1; // Didn't find a video stream
        }

        // Get a pointer to the codec context for the video stream
        pCodecCtxOrig = pFormatCtx->streams[videoStream]->codec;

        // Find the decoder for the video stream
        pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id);
        if (pCodec == NULL) {
            fprintf(stderr, "Unsupported codec!\n");
            return -1; // Codec not found
        }

        // Copy context — the original is not used directly; a copy is decoded
        // from instead (not sure why; original author's note).
        pCodecCtx = avcodec_alloc_context3(pCodec);
        if (avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
            fprintf(stderr, "Couldn't copy codec context");
            return -1; // Error copying codec context
        }

        // Open codec
        if (avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
            return -1; // Could not open codec
        }

        // Allocate video frame
        pFrame = av_frame_alloc();

        // Make a screen (window surface) to put our video.
#ifndef __DARWIN__
        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 0, 0);
#else
        screen = SDL_SetVideoMode(pCodecCtx->width, pCodecCtx->height, 24, 0);
#endif
        if (!screen) {
            fprintf(stderr, "SDL: could not set video mode - exiting\n");
            exit(1);
        }

        // Allocate a place to put our YUV image on that screen.
        bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);

        // initialize SWS context for software scaling
        sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
            SWS_BILINEAR, NULL, NULL, NULL);

        // ---- main loop ----
        while (av_read_frame(pFormatCtx, &packet) >= 0) {
            // In each iteration of our main loop, we run the Myo event loop for a set number of milliseconds.
            // This controls how often Myo data is polled; if this value is too
            // low, video delivery also lags, so consider the desired fps.
            hub.run(1000 / 500);

            // After processing events, print out the values we've obtained from
            // any events that have occurred (Myo status monitoring).
            collector.print();
            // ---- end of the Myo polling section ----

            // Is this a packet from the video stream?
            if (packet.stream_index == videoStream) {
                // Decode video frame
                avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

                // Did we get a complete video frame? Convert it into the overlay bitmap.
                if (frameFinished) {
                    SDL_LockYUVOverlay(bmp);

                    // YV12 stores planes as Y, V, U — hence the swapped 1/2 indices.
                    AVPicture pict;
                    pict.data[0] = bmp->pixels[0];
                    pict.data[1] = bmp->pixels[2];
                    pict.data[2] = bmp->pixels[1];
                    pict.linesize[0] = bmp->pitches[0];
                    pict.linesize[1] = bmp->pitches[2];
                    pict.linesize[2] = bmp->pitches[1];

                    // Convert the image into YUV format that SDL uses
                    sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                        pFrame->linesize, 0, pCodecCtx->height,
                        pict.data, pict.linesize);

                    SDL_UnlockYUVOverlay(bmp);

                    // Adjust the destination rectangle to implement software
                    // zoom in / zoom out (negative offsets center the overscan).
                    rect.x = -rect_w/2;
                    rect.y = -rect_h/2;
                    rect.w = pCodecCtx->width + rect_w;
                    rect.h = pCodecCtx->height + rect_h;
                    SDL_DisplayYUVOverlay(bmp, &rect);
                }
            }

            // Free the packet that was allocated by av_read_frame
            av_free_packet(&packet);
            SDL_PollEvent(&event);

            //// Check the Myo pose and send camera-control messages.
            //// Left/right camera control.
            // NOTE(review): `rest` is not declared in this function — presumably
            // a file-scope flag defined elsewhere; verify.
            if (collector.currentPose == myo::Pose::waveOut) {
                SendData(videoSocket.ClientSocket, "right", videoSocket.ToServer);
                rest = true;
            }
            if (collector.currentPose == myo::Pose::waveIn) {
                SendData(videoSocket.ClientSocket, "left", videoSocket.ToServer);
                rest = true;
            }
            // Up/down camera control.
            if (collector.currentPose == myo::Pose::fingersSpread && collector.pitch_w > 10) {
                SendData(videoSocket.ClientSocket, "up", videoSocket.ToServer);
                rest = true;
            }
            if (collector.currentPose == myo::Pose::fingersSpread && collector.pitch_w < 6) {
                SendData(videoSocket.ClientSocket, "down", videoSocket.ToServer);
                rest = true;
            }
            // Returning to rest stops the motors (sent once per movement).
            if (collector.currentPose == myo::Pose::rest && rest == true) {
                SendData(videoSocket.ClientSocket, "stop", videoSocket.ToServer);
                rest = false;
            }
            // Double-tap with low roll locks the armband; high roll unlocks it.
            if (collector.currentPose == myo::Pose::doubleTap && collector.roll_w <= 5) {
                collector.currentPose = myo::Pose::rest;
                rest = true;
                myo->lock();
            }
            if (collector.currentPose == myo::Pose::doubleTap && collector.roll_w > 5) {
                rest = true;
                myo->unlock(myo::Myo::unlockHold);
            }
            // Fist pose with roll drives zoom in / zoom out.
            if (collector.currentPose == myo::Pose::fist && collector.roll_w < 6) {
                ZoomOut(rect_w, rect_h, 0);
            }
            if (collector.currentPose == myo::Pose::fist && collector.roll_w > 8) {
                ZoomIn(rect_w, rect_h, 300);
            }

            // Keyboard event handling (mirrors the Myo controls).
            switch (event.type) {
            case SDL_QUIT:
                SDL_Quit();
                exit(0);
                break;
            case SDL_KEYDOWN:
                /* Check the SDLKey values and move change the coords */
                switch (event.key.keysym.sym){
                case SDLK_LEFT:
                    // send command string
                    SendData(videoSocket.ClientSocket, "left", videoSocket.ToServer);
                    break;
                case SDLK_RIGHT:
                    // send command string
                    SendData(videoSocket.ClientSocket, "right", videoSocket.ToServer);
                    break;
                case SDLK_UP:
                    SendData(videoSocket.ClientSocket, "up", videoSocket.ToServer);
                    break;
                case SDLK_DOWN:
                    SendData(videoSocket.ClientSocket, "down", videoSocket.ToServer);
                    break;
                case SDLK_q:
                    // zoom in
                    ZoomIn(rect_w,rect_h,300);
                    break;
                case SDLK_w:
                    // zoom out
                    ZoomOut(rect_w, rect_h, 0);
                    break;
                case SDLK_s:
                    // stop the motors
                    SendData(videoSocket.ClientSocket, "stop", videoSocket.ToServer);
                    break;
                case SDLK_x:
                    // quit the program
                    SDL_Quit();
                    exit(0);
                    break;
                default:
                    break;
                }
                // (no break: falls through to the outer default, which is a no-op)
            default:
                break;
            }
        }

        // Free the YUV frame
        av_frame_free(&pFrame);

        // Close the codecs
        avcodec_close(pCodecCtx);
        avcodec_close(pCodecCtxOrig);

        // Close the video file
        avformat_close_input(&pFormatCtx);

        // Close the socket.
        closesocket(videoSocket.ClientSocket);
        WSACleanup();
        return 0;
    }
    // Original author's note: "Personally I don't like handling exceptions this
    // way, but the example did it this way so it is left as is."
    catch (const std::exception& e) {
        std::cerr << "Error: " << e.what() << std::endl;
        std::cerr << "Press enter to continue.";
        std::cin.ignore();
        return 1;
    }
}
///@brief Execute a device. int MyoDevice::run() { try { // Read the initialize file. this->readIniFile(); // Prepare to use SIGService. this->sigService.setName(this->serviceName); this->initializeSigService(sigService); // check receive SIGService data by another thread CheckRecvSIGServiceData checkRecvSIGServiceData; boost::thread thCheckRecvData(&CheckRecvSIGServiceData::run, &checkRecvSIGServiceData, &this->sigService); // First, we create a Hub with our application identifier. Be sure not to use the com.example namespace when // publishing your application. The Hub provides access to one or more Myos. myo::Hub hub("org.sigverse.myoplugin"); std::cout << "Attempting to find a Myo..." << std::endl; // Next, we attempt to find a Myo to use. If a Myo is already paired in Myo Connect, this will return that Myo // immediately. // waitForMyo() takes a timeout value in milliseconds. In this case we will try to find a Myo for 10 seconds, and // if that fails, the function will return a null pointer. myo::Myo* myo = hub.waitForMyo(10000); // If waitForMyo() returned a null pointer, we failed to find a Myo, so exit with an error message. if (!myo) { throw std::runtime_error("Unable to find a Myo!"); } // We've found a Myo. std::cout << "Connected to a Myo armband!" << std::endl << std::endl; // Next we enable EMG streaming on the found Myo. myo->setStreamEmg(myo::Myo::streamEmgEnabled); // Next we construct an instance of our DeviceListener, so that we can register it with the Hub. DataCollector collector; // Hub::addListener() takes the address of any object whose class inherits from DeviceListener, and will cause // Hub::run() to send events to all registered device listeners. hub.addListener(&collector); // Finally we enter our main loop. while (true) { // In each iteration of our main loop, we run the Myo event loop for a set number of milliseconds. // In this case, we wish to update our display 20 times a second, so we run for 1000/20 milliseconds. 
hub.run(1000/20); // After processing events, we call the print() member function we defined above to print out the values we've // obtained from any events that have occurred. MyoSensorData sensorData = collector.getSensorData(); // Send message to SigServer. std::string messageHeader = this->generateMessageHeader(); std::string sensorDataMessage = sensorData.encodeSensorData(); std::string message = messageHeader + sensorDataMessage; this->sendMessage(this->sigService, message); // std::cout << message << std::endl; } sigService.disconnect(); } catch (std::exception &ex) { std::cout << "run ERR :" << ex.what() << std::endl; throw ex; } return 0; }
int main(int argc, char** argv) {
    // EMG gesture-classification demo (C++/CLI): streams EMG from a Myo,
    // classifies each window with a random forest, and smooths the result
    // with a histogram filter shown in a WinForms GUI.
    try {
        // ==== START MYO ====
        // First, we create a Hub with our application identifier. Be sure not to use the com.example namespace when
        // publishing your application. The Hub provides access to one or more Myos.
        myo::Hub hub("com.example.emg-data-sample");

        std::cout << "Attempting to find a Myo..." << std::endl;

        // Next, we attempt to find a Myo to use. If a Myo is already paired in Myo Connect, this will return that Myo
        // immediately.
        // waitForMyo() takes a timeout value in milliseconds. In this case we will try to find a Myo for 10 seconds, and
        // if that fails, the function will return a null pointer.
        myo::Myo* myo = hub.waitForMyo(10000);

        // If waitForMyo() returned a null pointer, we failed to find a Myo, so exit with an error message.
        if (!myo) {
            throw std::runtime_error("Unable to find a Myo!");
        }

        // We've found a Myo.
        std::cout << "Connected to a Myo armband!" << std::endl << std::endl;

        // Next we enable EMG streaming on the found Myo.
        myo->setStreamEmg(myo::Myo::streamEmgEnabled);
        // =======

        // Gesture name left empty; the interactive prompt is disabled below.
        std::string gesture;
        // std::cin >> gesture;

        // DataCollector is myo::DeviceListener class with utilities for
        DataCollector collector(gesture);

        // Our own gesture classifier (random forest), loaded from disk.
        randomforest classifier;
        classifier.createFromFile("randomforest_AD.txt");

        // Hub::addListener() takes the address of any object whose class inherits from DeviceListener, and will cause
        // Hub::run() to send events to all registered device listeners.
        hub.addListener(&collector);

        // Histogram filter smooths per-window classifications over time.
        histogram hist_filter;
        std::string detected_gesture;

        // C++/CLI managed GUI window for the histogram display.
        emgdatasampleVisualStudio2013::histGUI^ histogram_gui = gcnew emgdatasampleVisualStudio2013::histGUI();
        histogram_gui->Show();

        int n = 0; // windows processed since the last histogram reset
        while (1) {
            histogram_gui->Refresh();

            // In each iteration of our main loop, we run the Myo event loop for a set number of milliseconds.
            // In this case, we wish to update our display 20 times a second, so we run for 1000/20 milliseconds.
            hub.run(50);

            // Classify the current EMG feature window and report it.
            std::map<std::string, float> current_data = collector.getDataset();
            detected_gesture = classifier.classify(current_data);
            std::cout << detected_gesture << std::endl;

            // Accumulate into the histogram; every 11th window, report the
            // dominant (filtered) gesture and restart the histogram.
            hist_filter.add_instant(detected_gesture);
            histogram_gui->updateHist(hist_filter);
            if (n == 10) {
                std::cout << "Filtered gesture: " << std::endl;
                std::cout << hist_filter.peak() << std::endl << std::endl;
                hist_filter.restart();
                n = 0;
            }
            else {
                n++;
            }
        }
    } catch (const std::exception& e) {
        std::cerr << "Error: " << e.what() << std::endl;
        std::cerr << "Press enter to continue.";
        std::cin.ignore();
        return 1;
    }
}
void sleep(Time const& time) {
    // Suspend the current coroutine for the given duration: register a Timeout
    // for it with the hub (which presumably notify()s the coroutine when the
    // timeout fires — confirm against the hub's timeout handling), then park
    // the coroutine in wait() until that happens.
    hub()->timeoutIs(Timeout(time, current()));
    current()->wait();
}