int MediaSession::RecorderAttachToVideoMixerPort(int recorderId, int mixerId, int portId)
{
	//Get recorder
	Recorders::iterator it = recorders.find(recorderId);
	//If not found
	if (it==recorders.end())
		//Exit
		return Error("Recorder not found\n");
	//Get it
	Recorder* recorder = it->second;
	//Get video mixer
	VideoMixers::iterator itMixer = videoMixers.find(mixerId);
	//If not found
	if (itMixer==videoMixers.end())
		//Exit
		return Error("VideoMixerResource not found\n");
	//Get it
	VideoMixerResource* videoMixer = itMixer->second;
	//And attach
	return recorder->Attach(MediaFrame::Video,videoMixer->GetJoinable(portId));
}
int MediaSession::RecorderAttachToEndpoint(int recorderId, int endpointId, MediaFrame::Type media)
{
	//Get recorder
	Recorders::iterator it = recorders.find(recorderId);
	//If not found
	if (it==recorders.end())
		//Exit
		return Error("Recorder not found\n");
	//Get it
	Recorder* recorder = it->second;
	//Get source endpoint
	Endpoints::iterator itEndpoints = endpoints.find(endpointId);
	//If not found
	if (itEndpoints==endpoints.end())
		//Exit
		return Error("Endpoint not found\n");
	//Get it
	Endpoint* source = itEndpoints->second;
	//Attach
	return recorder->Attach(media,source->GetJoinable(media));
}
int main(int argc, char **argv)
{
    Recorder rec;
    rec.initialiseParameters();

    msl_ptgrey_camera::MSLPtGreyCamera cam;
    auto error = cam.init();
    if (error == -1)
    {
        cout << "Error! Try again!" << endl;
        return error;
    }

    int frame = 0;
    while (frame < rec.maxImages)
    {
        frame++;
        string fpath = rec.path;
        fpath.append(std::to_string(frame));
        fpath.append(".raw");
        cout << fpath << endl;
        // cout << "frame:" << frame << ", maxImages: " << rec.maxImages << endl;
        cam.saveCurrentImageToFile(fpath);
        // sleep() takes whole seconds, so the original call truncated to 0
        // for any frequency above 1 Hz; usleep keeps sub-second pacing.
        usleep(static_cast<useconds_t>(1e6 / std::max(0.033, (double)rec.freq)));
    }
    cout << "finished recording" << endl;
    return 0;
}
/* Composite fields are NOT supported so far */
void RecorderControlWidget::getParameters(Recorder& params) const
{
    params.setPath        (mUi->pathEdit->text().toStdString());
    params.setFileTemplate(mUi->fileTemplateEdit->text().toStdString());
}
void SampleChannel::calcVolumeEnv(int frame)
{
	/* method: check this frame && next frame, then calculate delta */

	Recorder::action *a0 = NULL;
	Recorder::action *a1 = NULL;
	int res;

	/* get this action on frame 'frame'. It's unlikely that the action
	 * is not found. */

	res = G_Recorder.getAction(index, ACTION_VOLUME, frame, &a0);
	if (res == 0)
		return;

	/* get the action next to this one.
	 * res == -1: a1 not found, this is the last one. Rewind the search
	 * and use action at frame number 0 (actions[0]).
	 * res == -2: ACTION_VOLUME not found. This should never happen */

	res = G_Recorder.getNextAction(index, ACTION_VOLUME, frame, &a1);
	if (res == -1)
		res = G_Recorder.getAction(index, ACTION_VOLUME, 0, &a1);

	volume_i = a0->fValue;
	volume_d = ((a1->fValue - a0->fValue) / ((a1->frame - a0->frame) / 2)) * 1.003f;
}
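// A hedged numeric check of the delta formula above, with hypothetical
// values: a fade from 0.0 to 1.0 across 4410 recorder units. The "/ 2"
// presumably converts interleaved stereo sample counts to frames, giving
// 2205 steps here, so the per-frame delta comes out to about 0.000455.
float f0 = 0.0f, f1 = 1.0f;
int   p0 = 0,    p1 = 4410;
float delta = ((f1 - f0) / ((p1 - p0) / 2)) * 1.003f;  // same shape as volume_d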
void SampleChannel::quantize(int index, int localFrame, Mixer *mixer)
{
	/* skip if LOOP_ANY or not in quantizer-wait mode */

	if ((mode & LOOP_ANY) || !qWait)
		return;

	/* no fadeout if the sample starts for the first time (from a
	 * STATUS_OFF), it would be meaningless. */

	if (status == STATUS_OFF) {
		status = STATUS_PLAY;
		sendMidiLplay();
		qWait  = false;
		tracker = fillChan(vChan, tracker, localFrame); /// FIXME: ???
	}
	else
		//setXFade(localFrame);
		reset(localFrame);

	/* this is the moment in which we record the keypress, if the
	 * quantizer is on. SINGLE_PRESS needs overdub */

	if (G_Recorder.canRec(this, mixer)) {
		if (mode == SINGLE_PRESS) {
			G_Recorder.startOverdub(index, ACTION_KEYS, mixer->currentFrame,
				G_KernelAudio.realBufsize);
			readActions = false;   // don't read actions while overdubbing
		}
		else
			G_Recorder.rec(index, ACTION_KEYPRESS, mixer->currentFrame);
		hasActions = true;
	}
}
void display(void)
{
    uint nPoints;

    glClear(GL_COLOR_BUFFER_BIT);
    shapeEditHook = examineShape;

    ShapeMatches shapeMatches(0.75f, 1);
    Recording &recording = *g_recorder.eject();
    for (uint i = 0; i < recording.length(); i++)
    {
        nPoints = recording[i]->length();
        g_shapes->compare(recording[i], &shapeMatches);
        if (nPoints > (800u / recordTime))
            g_recorder.erase(0, nPoints - (800u / recordTime));
    }
    g_recorder.erase(&recording);

    glutSwapBuffers();
}
void Autonomous(void)
{
    GetWatchdog().SetEnabled(false);
    if (recorder.StartPlayback())
    {
        // Replay recorded motor values until playback runs out
        // or autonomous mode ends.
        while (IsAutonomous() && recorder.Playback())
            ;
    }
}
void RecorderControlWidget::setParameters(const Recorder &input)
{
    // Block signals to send them all at once
    bool wasBlocked = blockSignals(true);
    mUi->pathEdit->setText(QString::fromStdString(input.path()));
    mUi->fileTemplateEdit->setText(QString::fromStdString(input.fileTemplate()));
    blockSignals(wasBlocked);
    emit paramsChanged();
}
__declspec(dllexport) Status Recorder_create(Recorder*& rec, const char* fileName)
{
    Recorder* vsl = new Recorder();
    Status status = vsl->create(fileName);
    if (status == STATUS_OK)
        rec = vsl;
    else
        Recorder_destroy(vsl);
    return status;
}
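// A caller-side sketch of the factory above. Recorder_destroy is assumed to
// mirror this export (it is called on the failure path but not shown here),
// and "capture.out" is a placeholder file name.
Recorder* rec = NULL;
Status status = Recorder_create(rec, "capture.out");
if (status == STATUS_OK)
{
    // ... drive the recorder ...
    Recorder_destroy(rec);  // assumed to release what Recorder_create allocated
}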
//----------------------------------------------------------------------------------------------------------------------
void CTRNN::record(Recorder& recorder)
{
  for (int i = 0; i < size; ++i)
  {
    recorder.push_back("NeuralInput" + toString(i), inputs[i]);
    recorder.push_back("NeuralState" + toString(i), states[i]);
    recorder.push_back("NeuralOutput" + toString(i), outputs[i]);
    recorder.push_back("NeuralExtInput" + toString(i), externalinputs[i]);
  }
}
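// The push_back(name, value) calls above imply a simple column-oriented
// store. A minimal sketch of such an interface, assuming it only needs to
// accumulate named samples; this is not the actual Recorder class used here.
#include <map>
#include <string>
#include <vector>

class SimpleRecorder
{
public:
  // Append one sample to the column identified by name.
  void push_back(const std::string& name, double value)
  {
    m_columns[name].push_back(value);
  }

private:
  std::map<std::string, std::vector<double> > m_columns;
};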
void Recorder::newWindow()
{
    if (recorderWindow_ == Secondary || secondaryRecorderPresent_)
        return;     // only allow a single secondary window
    if (administrationAllowed())
    {
        Recorder* newRecorder = new Recorder(study_, currentDisk_, user(),
                                             options(), false, Secondary, this);
        newRecorder->restore();
        secondaryRecorderPresent_ = true;
    }
}
RobotDemo(void)
    : myRobot(leftMotor, rightMotor),   // drive train built from the two motors
      stick(1),
      leftMotor(1),
      rightMotor(2)
{
    GetWatchdog().SetExpiration(100);
    // Register both drive motors so their outputs can be recorded.
    recorder.AddMotor(&leftMotor);
    recorder.AddMotor(&rightMotor);
}
int MediaSession::RecorderStop(int recorderId)
{
	//Get recorder
	Recorders::iterator it = recorders.find(recorderId);
	//If not found
	if (it==recorders.end())
		//Exit
		return Error("Recorder not found\n");
	//Get it
	Recorder* recorder = it->second;
	//Stop recording
	return recorder->Close();
}
XN_THREAD_PROC Recorder::threadMain(XN_THREAD_PARAM pThreadParam)
{
    Recorder* pSelf = reinterpret_cast<Recorder*>(pThreadParam);
    if (NULL != pSelf)
    {
        pSelf->m_running = TRUE;
        while (pSelf->m_running)
        {
            pSelf->messagePump();
        }
    }
    XN_THREAD_PROC_RETURN(XN_STATUS_OK);
}
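// The pump above is driven by the m_running flag. A portable sketch of the
// start/stop protocol it implies, using std::thread rather than the XN_*
// thread wrappers (this is an analogue, not the OpenNI API).
#include <atomic>
#include <thread>

struct Pump
{
  std::atomic<bool> running{false};
  std::thread worker;

  void start()
  {
    running = true;
    worker = std::thread([this] {
      while (running)               // same flag-polling shape as the loop above
        std::this_thread::yield();  // stand-in for messagePump()
    });
  }

  void stop()
  {
    running = false;                // the loop exits on its next check
    if (worker.joinable())
      worker.join();
  }
};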
int main(int argc, char *argv[])
{
    Network yarp;
    if (!yarp.checkNetwork())
    {
        yError() << "YARP network seems unavailable!";
        return 1;
    }

    ResourceFinder rf;
    rf.configure(argc, argv);

    Recorder recorder;
    return recorder.runModule(rf);
}
int MediaSession::RecorderDettach(int recorderId, MediaFrame::Type media)
{
	//Get recorder
	Recorders::iterator it = recorders.find(recorderId);
	//If not found
	if (it==recorders.end())
		//Exit
		return Error("Recorder not found\n");
	//Get it
	Recorder* recorder = it->second;
	//Detach (the API spells the method "Dettach")
	return recorder->Dettach(media);
}
int main(int argc, const char* const* argv)
{
    Recorder recorder;

    while (true)
    {
        static struct option sLongOptions[] = {
            { "help", no_argument, 0, 'h' },
            { 0, 0, 0, 0 }
        };

        opterr = 0; // don't print errors
        int c = getopt_long(argc, (char**)argv, "+hl", sLongOptions, NULL);
        if (c == -1)
            break;

        switch (c)
        {
            case 'h':
                print_usage_and_exit(false);
                break;
            case 'l':
                recorder.SetSkipLoading(false);
                break;
            default:
                print_usage_and_exit(true);
                break;
        }
    }

    // Remaining arguments should be the output file and the optional command
    // line.
    if (optind >= argc)
        print_usage_and_exit(true);

    const char* outputFile = argv[optind++];
    const char* const* programArgs = argv + optind;
    int programArgCount = argc - optind;

    // prepare for battle
    if (recorder.Init(outputFile) != B_OK)
        exit(1);

    // start the action
    recorder.Run(programArgs, programArgCount);

    return 0;
}
void Init(HWND hDlgWnd)
{
    g_hMainWnd = hDlgWnd;

    // Set the window icon
    SendMessage(hDlgWnd, WM_SETICON, ICON_BIG, (LPARAM)g_hIcon);

    // Add every recording device to the ComboBox
    recorder.EnumDevs((EnumDevsProc)AddRecordDev);
    HWND hComboWnd = GetDlgItem(g_hMainWnd, IDC_COMBO_DEVS);

    // If at least one recording device exists, select the first one
    int count = SendMessage(hComboWnd, CB_GETCOUNT, NULL, NULL);
    if (count != 0)
    {
        SendMessage(hComboWnd, CB_SETCURSEL, NULL, 0);
    }

    // Set up the tray icon
    g_nid.cbSize = sizeof(g_nid);
    g_nid.uID = IDC_NOTIFYICON;
    g_nid.uCallbackMessage = WM_AIRSOUND_NOTIFY;
    g_nid.hIcon = g_hIcon;
    // The window that handles tray icon messages must be specified,
    // otherwise the icon vanishes as soon as the mouse hovers over it
    g_nid.hWnd = g_hMainWnd;
    g_nid.uFlags = NIF_ICON | NIF_TIP | NIF_MESSAGE;
    strcpy(g_nid.szTip, TEXT("AirSoundServer"));
    Shell_NotifyIcon(NIM_ADD, &g_nid);

    EnableBlurBehind(g_hMainWnd);
}
int MediaSession::RecorderRecord(int recorderId, const char* filename)
{
	//Get recorder
	Recorders::iterator it = recorders.find(recorderId);
	//If not found
	if (it==recorders.end())
		//Exit
		return Error("Recorder not found\n");
	//Get it
	Recorder* recorder = it->second;
	//Create recording file
	if (!recorder->Create(filename))
		//Error
		return Error("-Could not create file\n");
	//Start recording
	return recorder->Record();
}
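// Taken together with RecorderAttachToEndpoint, RecorderDettach, and
// RecorderStop above, these methods suggest a typical session-side call
// sequence. A hedged sketch: "session" and the IDs are assumed to exist in
// the surrounding scope, and the recorder-creation call is hypothetical
// (it is not shown in this section).
int recorderId = session->RecorderCreate();                     // assumed API
session->RecorderAttachToEndpoint(recorderId, endpointId, MediaFrame::Audio);
session->RecorderAttachToEndpoint(recorderId, endpointId, MediaFrame::Video);
session->RecorderRecord(recorderId, "session.mp4");             // create file + start
// ... media flows ...
session->RecorderDettach(recorderId, MediaFrame::Audio);
session->RecorderDettach(recorderId, MediaFrame::Video);
session->RecorderStop(recorderId);                              // closes the file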
int ShadowSubdomain::addRecorder(Recorder &theRecorder)
{
    msgData(0) = ShadowActorSubdomain_addRecorder;
    msgData(1) = theRecorder.getClassTag();
    this->sendID(msgData);
    this->sendObject(theRecorder);
    return 0;
}
int main(int argc, char* argv[])
{
    cout << argc << endl;
    for (int h = 0; h < argc; h++)
        cout << argv[h] << endl;
    cout << endl;

    Recorder* rec = Recorder::GetInstance();

    // One of each device
    TestDevice* dev = new TestDevice("dev");
    Relay* rel = new Relay();
    SpeedController* ctrl = new SpeedController();
    Servo* serv = new Servo();
    DoubleSolenoid* ds = new DoubleSolenoid();
    Solenoid* sol = new Solenoid();

    // Add all devices to recorder
    rec->AddDevice("Relay", rel);
    rec->AddDevice("Speed Controller", ctrl);
    rec->AddDevice("Servo", serv);
    rec->AddDevice("Double Solenoid", ds);
    rec->AddDevice("Solenoid", sol);
    rec->AddDevice(dev);

    // Creates macro
    cout << dev->GetName() << endl;
    Macro* mac = rec->macro();

    int iterations = 5;
    for (int i = 0; i < iterations; i++)
        mac->Record();
    mac->WriteFile("auto.csv");
    mac->Reset();

    mac->ReadFile("auto.csv");
    while (!mac->IsFinished())
        mac->PlayBack();
    cout << "plz work" << endl;
    mac->Reset();

    Command* recCom = mac->NewRecordFileCommand("auto2.csv");
    recCom->Initialize();
    for (int i = 0; i < 30; i++)
        recCom->Execute();
    recCom->End();

    return 0;
}
void OperatorControl(void)
{
    GetWatchdog().SetEnabled(true);
    printf("Entered OperatorControl\n");
    while (IsOperatorControl())
    {
        GetWatchdog().Feed();

        // use the trigger to start recording.. at the moment,
        // it just gets ignored if you call it more than once
        if (stick.GetTrigger())
            recorder.StartRecording();

        myRobot.ArcadeDrive(stick); // drive with arcade style (use right stick)

        // always call the recording routine
        recorder.Record();
    }
}
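// Together with the constructor and Autonomous snippets above, this implies
// the intended lifecycle: register motors once, record during teleop, replay
// during autonomous. A condensed sketch (method names taken from the
// snippets, sequencing assumed; recorder and the motors come from the
// surrounding robot class):
recorder.AddMotor(&leftMotor);   // once, at construction
recorder.AddMotor(&rightMotor);

recorder.StartRecording();       // teleop: begin on trigger press
recorder.Record();               // then sample every control-loop pass

if (recorder.StartPlayback())    // autonomous: replay what was captured
    while (recorder.Playback())
        ;                        // Playback() presumably returns false at the end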
//----------------------------------------------------
// Recorder setup
//----------------------------------------------------
XnStatus setRecorder(Recorder recorder, XnStatus rc)
{
    // Create the recorder
    rc = recorder.Create(g_context);
    if (rc != XN_STATUS_OK) {
        cout << "error!" << endl;
        throw std::runtime_error(xnGetStatusString(rc));
    }

    // Set the recording destination
    rc = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, OUT_RECORDE_PATH);
    if (rc != XN_STATUS_OK) {
        cout << "error!" << endl;
        throw std::runtime_error(xnGetStatusString(rc));
    }

    // Add the image node to the recording
    rc = recorder.AddNodeToRecording(g_image, XN_CODEC_JPEG);
    if (rc != XN_STATUS_OK) {
        cout << "error!" << endl;
        throw std::runtime_error(xnGetStatusString(rc));
    }

    // Add the depth node to the recording
    rc = recorder.AddNodeToRecording(g_depth, XN_CODEC_UNCOMPRESSED);
    if (rc != XN_STATUS_OK) {
        cout << "error!" << endl;
        std::cout << __LINE__ << std::endl;
        throw std::runtime_error(xnGetStatusString(rc));
    }

    // Start recording (a frame is written on every WaitOneUpdateAll)
    rc = recorder.Record();
    if (rc != XN_STATUS_OK) {
        cout << "error!" << endl;
        throw std::runtime_error(xnGetStatusString(rc));
    }

    cout << "record set ok!" << endl;
    return rc;
}
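// A hedged sketch of how setRecorder might be driven from a capture loop.
// g_context and g_depth come from the snippet above; the exit condition is a
// placeholder. Per the comment above, a frame is committed to the file on
// each WaitOneUpdateAll.
Recorder recorder;                       // node wrapper, as in the snippets nearby
XnStatus rc = XN_STATUS_OK;
rc = setRecorder(recorder, rc);          // create, configure, start recording
while (!xnOSWasKeyboardHit())            // placeholder exit condition
{
    g_context.WaitOneUpdateAll(g_depth); // each update also records a frame
}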
void CleanUpExit()
{
    recorder.Release();
    g_player.Release();
    g_image.Release();
    g_scriptNode.Release();
    g_context.Release();
    g_hands.Release();
    g_gesture.Release();

    free(g_pTexMap);

    exit(1);
}
//----------------------------------------------------------------------------------------------------------------------
void SMCAgent::record(Recorder& recorder)
{
  recorder.push_back("PosX", m_position[0]);
  recorder.push_back("PosY", m_position[1]);
  recorder.push_back("VelX", m_velocity[0]);
  recorder.push_back("VelY", m_velocity[1]);
  recorder.push_back("Angle", m_angle);
  recorder.push_back("AngularSpeed", m_angularSpeed);
  recorder.push_back("Sensor", m_sensedValue);
  recorder.push_back("SensorDer", m_sensedValueDerivative);
  recorder.push_back("Energy", m_energy);
  recorder.push_back("SensedEnergy", getSensedEnergy());
  recorder.push_back("Food", m_food);
  recorder.push_back("Time", m_time);

  // ctrnn
  m_ctrnn->record(recorder);
}
void timer(int t)
{
    if (t == 0)
    {
        g_time += recordTime;
        g_recorder.record(0, mx, my, 1, g_time);
        glutPostRedisplay();
        glutTimerFunc(10, timer, 0);
    }
    else
    {
        glutPostRedisplay();
        // glutTimerFunc(1231, timer, 1);
    }
}
/* -----------------------------------------------------------------\
|  Method: main / program entry
|  Begin: 19.01.2010 / 15:57:36
|  Author: Jo2003
|  Description: program entry point
|
|  Parameters: command line parameters
|
|  Returns: 0 ==> ok
|        else ==> any error
\----------------------------------------------------------------- */
int main(int argc, char *argv[])
{
   // bugfix for crash on exit on *nix ...
#ifdef Q_WS_X11
   XInitThreads();
#endif

   qRegisterMetaType<vlcvid::SContextAction>("vlcvid::SContextAction");
   qRegisterMetaType<QLangVector>("QLangVector");

#ifdef Q_OS_MACX
   if (QSysInfo::MacintoshVersion > QSysInfo::MV_10_8)
   {
      // fix Mac OS X 10.9 (mavericks) font issue
      // https://bugreports.qt-project.org/browse/QTBUG-32789
      QFont::insertSubstitution(".Lucida Grande UI", "Lucida Grande");
   }
#endif

   int iRV = -1;
   QApplication app(argc, argv);
   Recorder    *pRec;
   QFTSettings *pFTSet;

   // Setting "app" as parent puts the newly generated objects into Qt's
   // memory management, so no delete is needed since Qt takes care ...
   pAppTransl  = new QTranslator(&app);
   pQtTransl   = new QTranslator(&app);
   pFolders    = new CDirStuff(&app);
   pHtml       = new CHtmlWriter(&app);
   pWatchStats = new QWatchStats(&app);
   pChanMap    = new QChannelMap();
   pStateMsg   = new QStateMessage(); // will be parented in recorder.cpp::Recorder()!

   if (pFolders && pAppTransl && pQtTransl && pHtml && pChanMap && pWatchStats)
   {
      if (pFolders->isInitialized())
      {
         if ((pCustomization = new QCustParser(&app)) != NULL)
         {
            pCustomization->parseCust();
            pFolders->setAppName(pCustomization->strVal("APP_NAME"));

            // make sure debug stuff is written from the very beginning ...
            VlcLog.SetLogFile(pFolders->getDataDir(), QString("%1.log").arg(pFolders->getBinName()));
            VlcLog.SetLogLevel(vlclog::LOG_ALL);

            QApplication::installTranslator(pQtTransl);
            QApplication::installTranslator(pAppTransl);

            pApiClient = new ApiClient(&app);
            pApiParser = new ApiParser(&app);

            // The database is the last service used.
            // Make sure it is destroyed last!
            // Therefore we don't set app as parent!
            pDb = new CVlcRecDB();

            if (pDb && pApiClient && pApiParser)
            {
               // check if needed settings are there ...
               if ((pDb->stringValue("User") == "") && (pDb->stringValue("PasswdEnc") == ""))
               {
                  if ((pFTSet = new QFTSettings()) != NULL)
                  {
                     pFTSet->exec();
                     delete pFTSet;
                     pFTSet = NULL;
                  }
               }

               if ((pRec = new Recorder()) != NULL)
               {
                  pRec->show();
                  iRV = app.exec();
                  delete pRec;
                  pRec = NULL;
               }
            }

            // delete database ...
            if (pDb)
            {
               delete pDb;
               pDb = NULL;
            }
         }
      }
   }

   if (pChanMap)
   {
      delete pChanMap;
      pChanMap = NULL;
   }

   return iRV;
}
/**
 * Returns either all network parameters, the values of a global variable,
 * values of an object, connections, or parameter lists.
 */
int csimMexGet(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
  if ( !TheNetwork )
    mexErrMsgTxt("CSIM: No network initialized yet!\n");

  if ( nrhs < 1 || nrhs > 3 )
    mexErrMsgTxt("CSIM-Usage: value = csim('get',idx[,fieldName]); or\n"
                 "            value = csim('get'[,globalvar]); or\n"
                 "            [pre,post] = csim('get',idx,'connections');\n");

  if ( nrhs < 2 ) {
    // csim('get'): return all network parameters
    TheNetwork->printFields();
    return 0;
  }

  char *globalVar;
  if ( getString(prhs[1],&globalVar) == 0 ) {
    // csim('get','globalVar'): return value of a global variable
    if ( nrhs > 2 )
      mexErrMsgTxt("CSIM-Usage: csim('get',globalvar);");
    double tmp;
    if ( TheNetwork->getField(globalVar,&tmp) < 0 )
      return -1;
    plhs[0] = mxCreateScalarDouble(tmp);
  } else {
    // Return info about a network object
    Advancable *a;
    uint32 *idx;
    int nIdx;
    char *fieldName;

    // Get index of the object
    if ( getUint32Vector(prhs[1],&idx,&nIdx) )
      mexErrMsgTxt("CSIM-Usage: P=csim('get',idx[,fieldName]); idx is not a uint32 vector.\n");

    if ( (a=TheNetwork->getObject(idx[0])) ) {
      Recorder *r;
      if ( (r = dynamic_cast<Recorder *>(a)) && (nrhs > 2) ) {
        // Return recorder traces
        if ( getString(prhs[2],&fieldName) )
          mexErrMsgTxt("CSIM-Usage: P=csim('get',idx,fieldName); fieldName is not a string.\n");
        if ( 0 == strncmp(fieldName,"traces",strlen(fieldName)) ) {
          plhs[0] = r->getMxStructArray();
          return 0;
        }
      }

      /* *************** BEGIN MICHAEL PFEIFFER **************** */
      // Return readout info
      Readout *ro;
      if ( (ro = dynamic_cast<Readout *>(a)) && (nrhs > 2) ) {
        if ( getString(prhs[2],&fieldName) )
          mexErrMsgTxt("CSIM-Usage: P=csim('get',idx,fieldName); fieldName is not a string.\n");
        // Return readout filters
        if ( 0 == strncmp(fieldName,"filters",strlen(fieldName)) ) {
          plhs[0] = getMxReadoutFilters(ro);
          return 0;
        }
        // Return readout preprocessors
        if ( 0 == strncmp(fieldName,"preprocessors",strlen(fieldName)) ) {
          plhs[0] = getMxReadoutPreprocessors(ro);
          return 0;
        }
        // Return readout algorithm
        if ( 0 == strncmp(fieldName,"algorithm",strlen(fieldName)) ) {
          plhs[0] = getMxReadoutAlgorithm(ro);
          return 0;
        }
      }

      PhysicalModel *phm;
      if ( (phm = dynamic_cast<PhysicalModel *>(a)) && (nrhs > 2) ) {
        if ( getString(prhs[2],&fieldName) )
          mexErrMsgTxt("CSIM-Usage: P=csim('get',idx,fieldName); fieldName is not a string.\n");
        // Return model input names and connections
        if ( 0 == strncmp(fieldName,"inputs",strlen(fieldName)) ) {
          plhs[0] = getMxModelInputs(phm);
          return 0;
        }
        // Return model output names and connections
        if ( 0 == strncmp(fieldName,"outputs",strlen(fieldName)) ) {
          plhs[0] = getMxModelOutputs(phm);
          return 0;
        }
      }
      /* *************** END MICHAEL PFEIFFER **************** */
    } else {
      TheCsimError.add("csim('get',idx,...); idx(1) is not a valid object index!\n");
      return -1;
    }

    if ( nrhs == 3 ) {
      if ( getString(prhs[2],&fieldName) )
        mexErrMsgTxt("CSIM-Usage: P=csim('get',idx,fieldName); fieldName is not a string.\n");

      if ( 0 == strcmp(fieldName,"connections") ) {
        // Return connections of a network object
        if ( nlhs != 2 )
          mexErrMsgTxt("CSIM-Usage: [pre,post]=csim('get',idx,'connections');"
                       " needs two return arguments.\n");
        if ( nIdx != 1 )
          mexErrMsgTxt("CSIM-Usage: [pre,post]=csim('get',idx,'connections');"
                       " idx must be a uint32 scalar.\n");

        if ( (a=TheNetwork->getObject(idx[0])) ) {
          // Return connections of neuron or synapse
          Neuron *n;
          Synapse *s;
          if ( (s = dynamic_cast<Synapse *>(a)) ) {
            // Get synapse connections
            plhs[0] = mxCreateNumericMatrix(1, 1, mxUINT32_CLASS, mxREAL);
            plhs[1] = mxCreateNumericMatrix(1, 1, mxUINT32_CLASS, mxREAL);
            *(uint32 *)(mxGetData(plhs[0])) = s->getPre();
            *(uint32 *)(mxGetData(plhs[1])) = s->getPost();
          } else if ( (n = dynamic_cast<Neuron *>(a)) ) {
            // Get neuron connections
            unsigned nPre  = 0;
            unsigned nPost = 0;
            if ( (nPre = n->nPre()) > 0 ) {
              // Get presynaptic connections
              plhs[0] = mxCreateNumericMatrix(1, nPre, mxUINT32_CLASS, mxREAL);
              n->getPre((uint32 *)mxGetData(plhs[0]));
            } else {
              plhs[0] = mxCreateDoubleMatrix(0, 0, mxREAL);
            }
            if ( (nPost = n->nPost()) > 0 ) {
              // Get postsynaptic inputs
              plhs[1] = mxCreateNumericMatrix(1, nPost, mxUINT32_CLASS, mxREAL);
              n->getPost((uint32 *)mxGetData(plhs[1]));
            } else {
              plhs[1] = mxCreateDoubleMatrix(0, 0, mxREAL);
            }
          } else {
            TheCsimError.add("csim('get',idx,'connections'); idx(1) is not a synapse or neuron!\n");
            return -1;
          }
        } else {
          TheCsimError.add("csim('get',idx,'connections'); idx(1) is not a valid object index!\n");
          return -1;
        }
      } else if ( 0 == strcmp(fieldName,"struct") ) {
        // Return struct description of the object
        Advancable *a;
        if ( (a=TheNetwork->getObject(idx[0])) )
          plhs[0] = getMxClassInfo(a);
        else {
          TheCsimError.add("csim('get',idx,'struct'); idx(1) is not a valid object index!\n");
          return -1;
        }
      } else {
        // Return a single parameter value of several objects
        double *p = 0;
        int m;
        if ( TheNetwork->getParameter(idx,nIdx,fieldName,&p,&m) < 0 ) {
          if ( p ) free(p);
          p = 0;
          return -1;
        }
        plhs[0] = mxCreateDoubleMatrix(m, nIdx, mxREAL);
        memcpy(mxGetPr(plhs[0]), p, m*nIdx*sizeof(double));
        if ( p ) free(p);
        p = 0;
        return 0;
      }
    } else if ( nrhs == 2 ) {
      // nrhs == 2: no fieldName given, so print the values of all
      // registered fields of idx[0]
      Advancable *a;
      if ( nlhs < 1 ) {
        for (int i=0; i<nIdx; i++) {
          if ( (a=TheNetwork->getObject(idx[i])) )
            a->printFields((char *)a);
          else {
            TheCsimError.add("csim('get',idx); idx(%i) is not a valid object index!\n", i+1);
            return -1;
          }
        }
      } else {
        if ( (a=TheNetwork->getObject(idx[0])) )
          plhs[0] = getMxClassInfo(a);
        else {
          TheCsimError.add("csim('get',idx); idx(1) is not a valid object index!\n");
          return -1;
        }
      }
    }
  }
  return 0;
}
/**
 * Main application entry point.
 *
 * Accepted arguments:
 * - cpu        Perform depth processing with the CPU.
 * - gl         Perform depth processing with OpenGL.
 * - cl         Perform depth processing with OpenCL.
 * - <number>   Serial number of the device to open.
 * - -noviewer  Disable viewer window.
 * - -streamer  Enable UDP streaming of captured images.
 * - -recorder  Enable recording of captured images.
 * - -replay    Enable replay of captured images.
 */
int main(int argc, char *argv[])
/// [main]
{
  std::string program_path(argv[0]);
  std::cerr << "Version: " << LIBFREENECT2_VERSION << std::endl;
  std::cerr << "Environment variables: LOGFILE=<protonect.log>" << std::endl;
  std::cerr << "Usage: " << program_path << " [-gpu=<id>] [gl | cl | clkde | cuda | cudakde | cpu] [<device serial>]" << std::endl;
  std::cerr << "        [-noviewer] [-norgb | -nodepth] [-help] [-version]" << std::endl;
  std::cerr << "        [-recorder] [-streamer] [-replay]" << std::endl;
  std::cerr << "        [-frames <number of frames to process>]" << std::endl;
  std::cerr << "To pause and unpause: pkill -USR1 ProtonectSR" << std::endl;
  size_t executable_name_idx = program_path.rfind("ProtonectSR");
  const std::string prog(argv[0]);

  std::string binpath = "/";

  if (executable_name_idx != std::string::npos)
  {
    binpath = program_path.substr(0, executable_name_idx);
  }

#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__)
  // avoid flooding the very slow Windows console with debug messages
  libfreenect2::setGlobalLogger(libfreenect2::createConsoleLogger(libfreenect2::Logger::Info));
#else
  // create a console logger with debug level (default is console logger with info level)
/// [logging]
  libfreenect2::setGlobalLogger(libfreenect2::createConsoleLogger(libfreenect2::Logger::Debug));
/// [logging]
#endif
/// [file logging]
  MyFileLogger *filelogger = new MyFileLogger(getenv("LOGFILE"));
  if (filelogger->good())
    libfreenect2::setGlobalLogger(filelogger);
  else
    delete filelogger;
/// [file logging]

/// [context]
  libfreenect2::Freenect2 freenect2;
  // TODO: enable on merge
  //libfreenect2::Freenect2Replay freenect2replay;
  libfreenect2::Freenect2Device *dev = 0;
  libfreenect2::PacketPipeline *pipeline = 0;
/// [context]

  std::string serial = "";

  bool viewer_enabled = true;
  bool streamer_enabled = false;
  bool recorder_enabled = false;
  bool replay_enabled = false;
  bool enable_rgb = true;
  bool enable_depth = true;
  int deviceId = -1;
  size_t framemax = -1;

  for (int argI = 1; argI < argc; ++argI)
  {
    const std::string arg(argv[argI]);

    if (arg == "-help" || arg == "--help" || arg == "-h" ||
        arg == "-v" || arg == "--version" || arg == "-version")
    {
      // Just let the initial lines display at the beginning of main
      return 0;
    }
    else if (arg.find("-gpu=") == 0)
    {
      if (pipeline)
      {
        std::cerr << "-gpu must be specified before pipeline argument" << std::endl;
        return -1;
      }
      deviceId = atoi(argv[argI] + 5);
    }
    else if (arg == "cpu")
    {
      if (!pipeline)
/// [pipeline]
        pipeline = new libfreenect2::CpuPacketPipeline();
/// [pipeline]
    }
    else if (arg == "gl")
    {
#ifdef LIBFREENECT2_WITH_OPENGL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenGLPacketPipeline();
#else
      std::cout << "OpenGL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cl")
    {
#ifdef LIBFREENECT2_WITH_OPENCL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenCLPacketPipeline(deviceId);
#else
      std::cout << "OpenCL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "clkde")
    {
#ifdef LIBFREENECT2_WITH_OPENCL_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::OpenCLKdePacketPipeline(deviceId);
#else
      std::cout << "OpenCL pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cuda")
    {
#ifdef LIBFREENECT2_WITH_CUDA_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::CudaPacketPipeline(deviceId);
#else
      std::cout << "CUDA pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg == "cudakde")
    {
#ifdef LIBFREENECT2_WITH_CUDA_SUPPORT
      if (!pipeline)
        pipeline = new libfreenect2::CudaKdePacketPipeline(deviceId);
#else
      std::cout << "CUDA pipeline is not supported!" << std::endl;
#endif
    }
    else if (arg.find_first_not_of("0123456789") == std::string::npos) // check if parameter could be a serial number
    {
      serial = arg;
    }
    else if (arg == "-noviewer" || arg == "--noviewer")
    {
      viewer_enabled = false;
    }
    else if (arg == "-norgb" || arg == "--norgb")
    {
      enable_rgb = false;
    }
    else if (arg == "-nodepth" || arg == "--nodepth")
    {
      enable_depth = false;
    }
    else if (arg == "-frames")
    {
      ++argI;
      framemax = strtol(argv[argI], NULL, 0);
      if (framemax == 0)
      {
        std::cerr << "invalid frame count '" << argv[argI] << "'" << std::endl;
        return -1;
      }
    }
    else if (arg == "-streamer" || arg == "--streamer" || prog == "freenect2-stream")
    {
      streamer_enabled = true;
    }
    else if (arg == "-recorder" || arg == "--recorder" || prog == "freenect2-record")
    {
      recorder_enabled = true;
    }
    else if (arg == "-replay" || arg == "--replay" || prog == "freenect2-replay")
    {
      replay_enabled = true;
    }
    else
    {
      std::cout << "Unknown argument: " << arg << std::endl;
    }
  }

  if (!enable_rgb && !enable_depth)
  {
    std::cerr << "Disabling both streams is not allowed!" << std::endl;
    return -1;
  }

/// [discovery]
  if (replay_enabled == false)
  {
    if (freenect2.enumerateDevices() == 0)
    {
      std::cout << "no device connected!" << std::endl;
      return -1;
    }

    if (serial == "")
    {
      serial = freenect2.getDefaultDeviceSerialNumber();
    }
  }
/// [discovery]

  if (replay_enabled == false)
  {
    if (pipeline)
    {
/// [open]
      dev = freenect2.openDevice(serial, pipeline);
/// [open]
    }
    else
    {
      dev = freenect2.openDevice(serial);
    }
  }
  else
  {
    DIR *d;
    struct dirent *dir;
    std::vector<std::string> frame_filenames;
    d = opendir("recordings/depth");
    if (!d)
    {
      // report the path we tried to open (printing "dir" here would have
      // read an uninitialized pointer)
      std::cerr << "Could not open directory recordings/depth for replay." << std::endl;
      exit(1);
    }
    while ((dir = readdir(d)) != NULL)
    {
      std::string name = dir->d_name;
      if (hasSuffix(name, ".depth"))
      {
        frame_filenames.push_back(name);
      }
      else
      {
        std::cerr << "Skipping currently unsupported frame filename: " << name << std::endl;
      }
    }

    // TODO: enable on merge
    /*
    if (pipeline)
    {
      /// [open]
      dev = freenect2replay.openDevice(frame_filenames, pipeline);
      /// [open]
    }
    else
    {
      dev = freenect2replay.openDevice(frame_filenames);
    }
    */
  }

  if (dev == 0)
  {
    std::cout << "failure opening device!" << std::endl;
    return -1;
  }

  devtopause = dev;

  signal(SIGINT, sigint_handler);
#ifdef SIGUSR1
  signal(SIGUSR1, sigusr1_handler);
#endif
  protonect_shutdown = false;

/// [listeners]
  int types = 0;
  if (enable_rgb)
    types |= libfreenect2::Frame::Color;
  if (enable_depth)
    types |= libfreenect2::Frame::Ir | libfreenect2::Frame::Depth;
  libfreenect2::SyncMultiFrameListener listener(types);
  libfreenect2::FrameMap frames;

  dev->setColorFrameListener(&listener);
  dev->setIrAndDepthFrameListener(&listener);
/// [listeners]

/// [start]
  if (enable_rgb && enable_depth)
  {
    if (!dev->start())
      return -1;
  }
  else
  {
    if (!dev->startStreams(enable_rgb, enable_depth))
      return -1;
  }

  std::cout << "device serial: " << dev->getSerialNumber() << std::endl;
  std::cout << "device firmware: " << dev->getFirmwareVersion() << std::endl;
/// [start]

/// [registration setup]
  libfreenect2::Registration* registration = new libfreenect2::Registration(dev->getIrCameraParams(), dev->getColorCameraParams());
  libfreenect2::Frame undistorted(512, 424, 4), registered(512, 424, 4);
/// [registration setup]

  size_t framecount = 0;
#ifdef EXAMPLES_WITH_OPENGL_SUPPORT
  Viewer viewer;
  if (viewer_enabled)
    viewer.initialize();
#else
  viewer_enabled = false;
#endif

  Streamer streamer; // have to declare it outside statements to be accessible everywhere
  Recorder recorder;

  if (streamer_enabled)
  {
    streamer.initialize();
  }
  if (recorder_enabled)
  {
    recorder.initialize();
  }

/// [loop start]
  while (!protonect_shutdown && (framemax == (size_t)-1 || framecount < framemax))
  {
    if (!listener.waitForNewFrame(frames, 10*1000)) // 10 seconds
    {
      std::cout << "timeout!" << std::endl;
      return -1;
    }
    libfreenect2::Frame *rgb = frames[libfreenect2::Frame::Color];
    libfreenect2::Frame *ir = frames[libfreenect2::Frame::Ir];
    libfreenect2::Frame *depth = frames[libfreenect2::Frame::Depth];
/// [loop start]

    if (enable_rgb && enable_depth)
    {
/// [registration]
      registration->apply(rgb, depth, &undistorted, &registered);
/// [registration]
    }

    framecount++;

    if (streamer_enabled)
    {
      streamer.stream(depth);
    }

    if (recorder_enabled)
    {
      // TODO: add recording timestamp if max frame number reached
      // + avoid recording new ones
      recorder.record(depth, "depth");
      recorder.record(&registered, "registered");
      // recorder.record(rgb, "rgb");
      recorder.registTimeStamp();
    }

    if (!viewer_enabled)
    {
      if (framecount % 100 == 0)
        std::cout << "The viewer is turned off. Received " << framecount
                  << " frames. Ctrl-C to stop." << std::endl;
      listener.release(frames);
      continue;
    }

#ifdef EXAMPLES_WITH_OPENGL_SUPPORT
    if (enable_rgb)
    {
      viewer.addFrame("RGB", rgb);
    }
    if (enable_depth)
    {
      viewer.addFrame("ir", ir);
      viewer.addFrame("depth", depth);
    }
    if (enable_rgb && enable_depth)
    {
      viewer.addFrame("registered", &registered);
    }

    protonect_shutdown = protonect_shutdown || viewer.render();
#endif

/// [loop end]
    listener.release(frames);
    /** libfreenect2::this_thread::sleep_for(libfreenect2::chrono::milliseconds(100)); */
  }
/// [loop end]

  if (recorder_enabled)
  {
    recorder.saveTimeStamp();
  }

  // TODO: restarting ir stream doesn't work!
  // TODO: bad things will happen, if frame listeners are freed before dev->stop() :(
/// [stop]
  dev->stop();
  dev->close();
/// [stop]

  delete registration;

  return 0;
}