// Creates a DepthGenerator from Context
OpenNIDepthGenerator::OpenNIDepthGenerator(int num)
  : m_DepthGenerator(NULL), m_Options(NULL)
{
  XnStatus status = XN_STATUS_ERROR;
  m_DepthGenerator = new DepthGenerator();
  NodeInfoList l;
  // enumerate depth generators
  OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, NULL, l);
  int i = 0;
  // look for generator according to number num
  for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
    if (i == num){
      NodeInfo ni = *it;
      status = OpenNIContext::CreateProductionTree(ni, *m_DepthGenerator);
    }
  }
  if(i <= num){ // not enough generators
    std::ostringstream s;
    s << "Demanded depth generator nr " << num
      << " but only " << i << " available.";
    DEBUG_LOG(s.str());
    throw ICLException(s.str());
  }
  if (status != XN_STATUS_OK){ // error while creating depth generator
    std::ostringstream s;
    s << "Generator init error " << xnGetStatusString(status);
    DEBUG_LOG(s.str());
    throw ICLException(s.str());
  }
  // create GeneratorOptions for DepthGenerator
  m_Options = new DepthGeneratorOptions(m_DepthGenerator);
  m_DepthGenerator->StartGenerating();
}
//----------------------------------------
bool ofxOpenNICapture::setup(ofxOpenNI & _context, string filename, XnCodecID depthFormat,
                             XnCodecID imageFormat, XnCodecID irFormat, XnCodecID audioFormat){
    context = &_context;
    csFileName = ofToDataPath(filename);

    nodes[CAPTURE_DEPTH_NODE].captureFormat = depthFormat;
    nodes[CAPTURE_IMAGE_NODE].captureFormat = imageFormat;
    nodes[CAPTURE_IR_NODE].captureFormat    = irFormat;
    nodes[CAPTURE_AUDIO_NODE].captureFormat = audioFormat;

    XnStatus nRetVal = XN_STATUS_OK;
    NodeInfoList recordersList;
    nRetVal = context->getXnContext().EnumerateProductionTrees(XN_NODE_TYPE_RECORDER, NULL, recordersList);
    START_CAPTURE_CHECK_RC(nRetVal, "Enumerate recorders");

    // take first
    NodeInfo chosen = *recordersList.Begin();

    pRecorder = new Recorder;
    nRetVal = context->getXnContext().CreateProductionTree(chosen, *pRecorder);
    START_CAPTURE_CHECK_RC(nRetVal, "Create recorder");

    nRetVal = pRecorder->SetDestination(XN_RECORD_MEDIUM_FILE, csFileName.c_str());
    START_CAPTURE_CHECK_RC(nRetVal, "Set output file");

    return true;
}
void SimKinect::OpenCommon()
{
    XnStatus nRetVal = XN_STATUS_OK;

    bIsDepthOn  = false;
    bIsImageOn  = false;
    bIsIROn     = false;
    bIsAudioOn  = false;
    bIsPlayerOn = false;
    bIsUserOn   = false;

    NodeInfoList list;
    nRetVal = context.EnumerateExistingNodes(list);
    if (nRetVal == XN_STATUS_OK)
    {
        for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
        {
            switch ((*it).GetDescription().Type)
            {
            case XN_NODE_TYPE_DEVICE:
                (*it).GetInstance(device);
                break;
            case XN_NODE_TYPE_DEPTH:
                bIsDepthOn = true;
                (*it).GetInstance(depth_generator);
                break;
            case XN_NODE_TYPE_IMAGE:
                bIsImageOn = true;
                (*it).GetInstance(color_generator);
                break;
            case XN_NODE_TYPE_IR:
                bIsIROn = true;
                (*it).GetInstance(ir_generator);
                break;
            case XN_NODE_TYPE_AUDIO:
                bIsAudioOn = true;
                (*it).GetInstance(audio_generator);
                break;
            case XN_NODE_TYPE_PLAYER:
                bIsPlayerOn = true;
                (*it).GetInstance(player);
                break;
            case XN_NODE_TYPE_USER:
                bIsUserOn = true;
                (*it).GetInstance(user_generator);
                break;
            }
        }
    }

    XnCallbackHandle hDummy;
    context.RegisterToErrorStateChange(onErrorStateChanged, NULL, hDummy);
}
bool captureOpenWriteDevice()
{
    XnStatus nRetVal = XN_STATUS_OK;

    NodeInfoList recordersList;
    nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_RECORDER, NULL, recordersList);
    START_CAPTURE_CHECK_RC(nRetVal, "Enumerate recorders");

    // take first
    NodeInfo chosen = *recordersList.Begin();
    nRetVal = g_Context.CreateProductionTree(chosen);
    START_CAPTURE_CHECK_RC(nRetVal, "Create recorder");

    g_Capture.pRecorder = new Recorder;
    nRetVal = chosen.GetInstance(*g_Capture.pRecorder);
    START_CAPTURE_CHECK_RC(nRetVal, "Get recorder instance");

    nRetVal = g_Capture.pRecorder->SetDestination(XN_RECORD_MEDIUM_FILE, g_Capture.csFileName);
    START_CAPTURE_CHECK_RC(nRetVal, "Set output file");

    if (getDevice() != NULL)
    {
        nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getDevice(), XN_CODEC_UNCOMPRESSED);
        START_CAPTURE_CHECK_RC(nRetVal, "add device node");
    }
    if (isDepthOn() && (g_Capture.DepthFormat != CODEC_DONT_CAPTURE))
    {
        nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getDepthGenerator(), g_Capture.DepthFormat);
        START_CAPTURE_CHECK_RC(nRetVal, "add depth node");
    }
    if (isImageOn() && (g_Capture.ImageFormat != CODEC_DONT_CAPTURE))
    {
        nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getImageGenerator(), g_Capture.ImageFormat);
        START_CAPTURE_CHECK_RC(nRetVal, "add image node");
    }
    if (isIROn() && (g_Capture.IRFormat != CODEC_DONT_CAPTURE))
    {
        nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getIRGenerator(), g_Capture.IRFormat);
        START_CAPTURE_CHECK_RC(nRetVal, "add IR stream");
    }
    if (isAudioOn() && (g_Capture.AudioFormat != CODEC_DONT_CAPTURE))
    {
        nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getAudioGenerator(), g_Capture.AudioFormat);
        START_CAPTURE_CHECK_RC(nRetVal, "add Audio stream");
    }

    return true;
}
void openCommon()
{
    XnStatus nRetVal = XN_STATUS_OK;

    g_bIsDepthOn  = false;
    g_bIsImageOn  = false;
    g_bIsIROn     = false;
    g_bIsAudioOn  = false;
    g_bIsPlayerOn = false;

    NodeInfoList list;
    nRetVal = g_Context.EnumerateExistingNodes(list);
    if (nRetVal == XN_STATUS_OK)
    {
        for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
        {
            switch ((*it).GetDescription().Type)
            {
            case XN_NODE_TYPE_DEVICE:
                (*it).GetInstance(g_Device);
                break;
            case XN_NODE_TYPE_DEPTH:
                g_bIsDepthOn = true;
                (*it).GetInstance(g_Depth);
                break;
            case XN_NODE_TYPE_IMAGE:
                g_bIsImageOn = true;
                (*it).GetInstance(g_Image);
                break;
            case XN_NODE_TYPE_IR:
                g_bIsIROn = true;
                (*it).GetInstance(g_IR);
                break;
            case XN_NODE_TYPE_AUDIO:
                g_bIsAudioOn = true;
                (*it).GetInstance(g_Audio);
                break;
            case XN_NODE_TYPE_PLAYER:
                g_bIsPlayerOn = true;
                (*it).GetInstance(g_Player);
                break;
            }
        }
    }

    XnCallbackHandle hDummy;
    g_Context.RegisterToErrorStateChange(onErrorStateChanged, NULL, hDummy);

    initConstants();
    readFrame();
}
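// Note: several snippets here register an onErrorStateChanged callback with
// Context::RegisterToErrorStateChange() but do not show its body. A minimal
// sketch follows; the handler body is an assumption, only the (XnStatus, void*)
// signature comes from the OpenNI error-state callback type.
void XN_CALLBACK_TYPE onErrorStateChanged(XnStatus errorState, void* pCookie)
{
    if (errorState != XN_STATUS_OK)
    {
        // the context entered an error state; report it and let the app decide what to do
        printf("Context error state: %s\n", xnGetStatusString(errorState));
    }
    else
    {
        printf("Context error state cleared.\n");
    }
}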
bool captureOpenWriteDevice()
{
    XnStatus nRetVal = XN_STATUS_OK;

    NodeInfoList recordersList;
    nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_RECORDER, NULL, recordersList);
    START_CAPTURE_CHECK_RC(nRetVal, "Enumerate recorders");

    // take first
    NodeInfo chosen = *recordersList.Begin();

    g_Capture.pRecorder = new Recorder;
    nRetVal = g_Context.CreateProductionTree(chosen, *g_Capture.pRecorder);
    START_CAPTURE_CHECK_RC(nRetVal, "Create recorder");

    nRetVal = g_Capture.pRecorder->SetDestination(XN_RECORD_MEDIUM_FILE, g_Capture.csFileName);
    START_CAPTURE_CHECK_RC(nRetVal, "Set output file");

    return true;
}
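// Note: the capture snippets above rely on a START_CAPTURE_CHECK_RC macro that is
// not shown here. A plausible sketch is given below, assuming it follows the usual
// OpenNI CHECK_RC pattern and aborts the setup by returning false; the real macro
// may also perform cleanup (e.g. releasing the recorder) before returning.
#define START_CAPTURE_CHECK_RC(rc, what)                              \
    if ((rc) != XN_STATUS_OK)                                         \
    {                                                                 \
        printf("Failed to %s: %s\n", what, xnGetStatusString(rc));    \
        return false;                                                 \
    }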
// Creates a RgbGenerator from Context
OpenNIRgbGenerator::OpenNIRgbGenerator(int num)
  : m_RgbGenerator(NULL), m_Options(NULL)
{
  XnStatus status;
  // create DepthGenerator. The Kinect rgb-generator did not work without
  // a depth generator being initialized beforehand.
  m_DepthGenerator = new DepthGenerator();
  if (XN_STATUS_OK != (status = OpenNIContext::Create(m_DepthGenerator))){
    std::string error = xnGetStatusString(status);
    DEBUG_LOG("DepthGenerator init error '" << error << "'");
    // throw by value (not 'throw new') so the exception matches the
    // ICLException handlers used by the other generator constructors
    throw ICLException(error);
  }
  m_RgbGenerator = new ImageGenerator();
  NodeInfoList l;
  // get all rgb-image generators
  OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_IMAGE, NULL, l);
  int i = 0;
  // create generator according to num
  for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
    if (i == num){
      NodeInfo ni = *it;
      status = OpenNIContext::CreateProductionTree(ni, *m_RgbGenerator);
    }
  }
  if(i <= num){ // not enough generators found
    std::ostringstream s;
    s << "Demanded rgb generator nr " << num
      << " but only " << i << " available.";
    DEBUG_LOG(s.str());
    throw ICLException(s.str());
  }
  if (status != XN_STATUS_OK){ // error while creating
    std::string error = xnGetStatusString(status);
    DEBUG_LOG("ImageGenerator init error '" << error << "'");
    throw ICLException(error);
  }
  // create generator options
  m_Options = new ImageGeneratorOptions(m_RgbGenerator);
  m_RgbGenerator->StartGenerating();
  DEBUG_LOG2("done creating OpenNIRgbGenerator");
}
// Creates an IrGenerator from Context
OpenNIIRGenerator::OpenNIIRGenerator(int num)
  : m_IrGenerator(NULL), m_Options(NULL)
{
  XnStatus status = XN_STATUS_ERROR;
  m_IrGenerator = new IRGenerator();
  NodeInfoList l;
  // enumerate ir generators
  OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_IR, NULL, l);
  int i = 0;
  // create generator according to num
  for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
    if (i == num){
      NodeInfo ni = *it;
      status = OpenNIContext::CreateProductionTree(ni, *m_IrGenerator);
    }
  }
  if(i <= num){ // not enough generators
    std::ostringstream s;
    s << "Demanded ir generator nr " << num
      << " but only " << i << " available.";
    DEBUG_LOG(s.str());
    throw ICLException(s.str());
  }
  if (XN_STATUS_OK != status){ // error while creating generator
    std::string error = xnGetStatusString(status);
    DEBUG_LOG("IRGenerator init error '" << error << "'");
    // throw by value for consistency with the other generator constructors
    throw ICLException(error);
  }
  // somehow my kinect did not create the ir images before setting it to
  // this MapOutputMode.
  XnMapOutputMode mo;
  mo.nFPS = 30;
  mo.nXRes = 640;
  mo.nYRes = 480;
  m_IrGenerator->SetMapOutputMode(mo);
  // create generator options
  m_Options = new MapGeneratorOptions(m_IrGenerator);
  status = m_IrGenerator->StartGenerating();
  DEBUG_LOG2("startgenerating: " << xnGetStatusString(status));
}
bool KinectCamera::Connect(int index)
{
    m_index = index;
    XnStatus rc;
    EnumerationErrors errors;
    rc = context.Init();

    NodeInfoList list;
    rc = context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, list, &errors);

    int i = 0;
    for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it, ++i)
    {
        if (i == index)
        {
            printf("%d\n", i);
            NodeInfo deviceNodeInfo = *it;
            context.CreateProductionTree(deviceNodeInfo);

            rc |= depth.Create(context);
            rc |= image.Create(context);

            XnMapOutputMode m;
            m.nFPS  = 30;  // nFPS was left uninitialized in the original; 30 is the usual Kinect rate
            m.nXRes = 640;
            m.nYRes = 480;
            image.SetMapOutputMode(m);

            rc |= context.StartGeneratingAll();
            break;
        }
    }
    if (i != index)
    {
        return false;
    }
    printf("Success: %d\n", rc);
    return true;
}
/**
 * Initializes the information nodes coming from the Kinect.
 */
void openCommon(){
    XnStatus nRetVal = XN_STATUS_OK;

    NodeInfoList list;
    nRetVal = g_Context.EnumerateExistingNodes(list);
    if (nRetVal == XN_STATUS_OK){
        for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it){
            switch ((*it).GetDescription().Type){
            case XN_NODE_TYPE_DEPTH:
                (*it).GetInstance(g_DepthGenerator);
                break;
            case XN_NODE_TYPE_IMAGE:
                (*it).GetInstance(g_ImageGenerator);
                break;
            case XN_NODE_TYPE_USER:
                (*it).GetInstance(g_UserGenerator);
                break;
            case XN_NODE_TYPE_PLAYER:
                (*it).GetInstance(g_Player);
                break;
            }
        }
    }
} //openCommon
XnStatus GeneratorWatcher::UpdateFrameSync()
{
    // go over all nodes, and find the frame synced one
    Context context;
    m_generator.GetContext(context);

    NodeInfoList nodes;
    XnStatus nRetVal = context.EnumerateExistingNodes(nodes);
    XN_IS_STATUS_OK(nRetVal);

    for (NodeInfoList::Iterator it = nodes.Begin(); it != nodes.End(); ++it)
    {
        NodeInfo info = *it;

        // make sure this is a generator
        if (xnIsTypeDerivedFrom(info.GetDescription().Type, XN_NODE_TYPE_GENERATOR))
        {
            Generator otherGen;
            nRetVal = info.GetInstance(otherGen);
            XN_IS_STATUS_OK(nRetVal);

            if (m_generator.GetFrameSyncCap().IsFrameSyncedWith(otherGen))
            {
                nRetVal = NotifyStringPropChanged(XN_PROP_FRAME_SYNCED_WITH, otherGen.GetName());
                XN_IS_STATUS_OK(nRetVal);
                return XN_STATUS_OK;
            }
        }
    }

    // if we got here, we're not frame synced
    nRetVal = NotifyStringPropChanged(XN_PROP_FRAME_SYNCED_WITH, "");
    XN_IS_STATUS_OK(nRetVal);

    return XN_STATUS_OK;
}
int main(int argc, char **argv)
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;

    if( USE_RECORED_DATA ){
        g_Context.Init();
        g_Context.OpenFileRecording(RECORD_FILE_PATH);
        xn::Player player;

        // Get the Player node
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_PLAYER, player);
        CHECK_RC(nRetVal, "Find player");
        LOG_D("PlaybackSpeed: %d", player.GetPlaybackSpeed());

        xn::NodeInfoList nodeList;
        player.EnumerateNodes(nodeList);
        for( xn::NodeInfoList::Iterator it = nodeList.Begin(); it != nodeList.End(); ++it){
            if( (*it).GetDescription().Type == XN_NODE_TYPE_IMAGE ){
                nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
                CHECK_RC(nRetVal, "Find image node");
                LOG_D("%s", "ImageGenerator created.");
            }
            else if( (*it).GetDescription().Type == XN_NODE_TYPE_DEPTH ){
                nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
                CHECK_RC(nRetVal, "Find depth node");
                LOG_D("%s", "DepthGenerator created.");
            }
            else{
                LOG_D("%s %s %s",
                      ::xnProductionNodeTypeToString((*it).GetDescription().Type),
                      (*it).GetInstanceName(),
                      (*it).GetDescription().strName);
            }
        }
    }
    else{
        LOG_I("Reading config from: '%s'", CONFIG_XML_PATH);
        nRetVal = g_Context.InitFromXmlFile(CONFIG_XML_PATH, g_scriptNode, &errors);
        if (nRetVal == XN_STATUS_NO_NODE_PRESENT){
            XnChar strError[1024];
            errors.ToString(strError, 1024);
            LOG_E("%s\n", strError);
            return (nRetVal);
        }
        else if (nRetVal != XN_STATUS_OK){
            LOG_E("Open failed: %s", xnGetStatusString(nRetVal));
            return (nRetVal);
        }
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
        CHECK_RC(nRetVal, "No depth");

        // Get the ImageGenerator
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
        CHECK_RC(nRetVal, "Find image generator");
    }

    // Get the UserGenerator (create one if none exists yet)
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if(nRetVal != XN_STATUS_OK){
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Create user generator");
    }

    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)){
        LOG_E("%s", "Supplied user generator doesn't support skeleton");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");

    g_SkeletonCap = g_UserGenerator.GetSkeletonCap();
    nRetVal = g_SkeletonCap.RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_SkeletonCap.RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");

    if (g_SkeletonCap.NeedPoseForCalibration()){
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)){
            LOG_E("%s", "Pose required, but not supported");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_SkeletonCap.GetCalibrationPose(g_strPose);
    }
    g_SkeletonCap.SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");

    // Create the image buffer used for display
    XnMapOutputMode mapMode;
    g_ImageGenerator.GetMapOutputMode(mapMode);
    g_rgbImage = cvCreateImage(cvSize(mapMode.nXRes, mapMode.nYRes), IPL_DEPTH_8U, 3);

    LOG_I("%s", "Starting to run");
    if(g_bNeedPose){
        LOG_I("%s", "Assume calibration pose");
    }

    xn::Recorder recorder;
    if( DO_RECORED && !USE_RECORED_DATA ){
        // Create the recorder
        LOG_I("%s", "Setup Recorder");
        nRetVal = recorder.Create(g_Context);
        CHECK_RC(nRetVal, "Create recorder");

        // Set the output destination
        nRetVal = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, RECORD_FILE_PATH);
        CHECK_RC(nRetVal, "Set recorder destination file");

        // Add the depth and camera streams as recording targets
        nRetVal = recorder.AddNodeToRecording(g_DepthGenerator, XN_CODEC_NULL);
        CHECK_RC(nRetVal, "Add depth node to recording");
        nRetVal = recorder.AddNodeToRecording(g_ImageGenerator, XN_CODEC_NULL);
        CHECK_RC(nRetVal, "Add image node to recording");
        LOG_I("%s", "Recorder setup done.");
    }

    while (!xnOSWasKeyboardHit())
    {
        g_Context.WaitOneUpdateAll(g_UserGenerator);

        if( DO_RECORED && !USE_RECORED_DATA ){
            nRetVal = recorder.Record();
            CHECK_RC(nRetVal, "Record");
        }

        // Get the raw camera image data
        xn::ImageMetaData imageMetaData;
        g_ImageGenerator.GetMetaData(imageMetaData);

        // Copy it into the IplImage buffer
        xnOSMemCopy(g_rgbImage->imageData, imageMetaData.RGB24Data(), g_rgbImage->imageSize);

        // Convert RGB to BGR for display with OpenCV
        cvCvtColor(g_rgbImage, g_rgbImage, CV_RGB2BGR);

        // Get the per-user label pixels from the UserGenerator
        xn::SceneMetaData sceneMetaData;
        g_UserGenerator.GetUserPixels(0, sceneMetaData);

        XnUserID allUsers[MAX_NUM_USERS];
        XnUInt16 nUsers = MAX_NUM_USERS;
        g_UserGenerator.GetUsers(allUsers, nUsers);
        for (int i = 0; i < nUsers; i++)
        {
            // Only draw users whose calibration succeeded
            if (g_SkeletonCap.IsTracking(allUsers[i]))
            {
                // Draw the skeleton
                DrawSkelton(allUsers[i], i);
            }
        }

        // Show the frame
        cvShowImage("User View", g_rgbImage);

        // Exit when ESC is pressed
        if (cvWaitKey(10) == 27)
        {
            break;
        }
    }

    if( !USE_RECORED_DATA ){
        g_scriptNode.Release();
    }
    g_DepthGenerator.Release();
    g_UserGenerator.Release();
    g_Context.Release();
    if (g_rgbImage != NULL)
    {
        cvReleaseImage(&g_rgbImage);
    }
    g_Context.Shutdown();
}
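// Note: the player/recorder snippets in this section use the CHECK_RC macro from the
// OpenNI samples without defining it. For reference, a sketch in the spirit of that
// macro (the exact message format varies between samples):
#define CHECK_RC(nRetVal, what)                                         \
    if (nRetVal != XN_STATUS_OK)                                        \
    {                                                                   \
        printf("%s failed: %s\n", what, xnGetStatusString(nRetVal));    \
        return nRetVal;                                                 \
    }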
int main(int argc, char* argv[])
{
    XnStatus rc;
    EnumerationErrors errors;

    Context b_context;
    rc = b_context.Init();

    NodeInfoList list;
    rc = b_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, list, &errors);

    int i = 0;
    for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it, ++i)
    {
        printf("making node %d\n", i);

        Context l_context;
        //ScriptNode node;
        rc = l_context.Init();  // one Init() per device context (the original called it twice)

        NodeInfo deviceNodeInfo = *it;
        l_context.CreateProductionTree(deviceNodeInfo);

        DepthGenerator l_depth;
        ImageGenerator l_image;
        rc = l_depth.Create(l_context);
        rc = l_image.Create(l_context);

        DepthMetaData l_depthMD;
        ImageMetaData l_imageMD;
        l_depth.GetMetaData(l_depthMD);
        l_image.GetMetaData(l_imageMD);

        rc = l_context.StartGeneratingAll();

        g_nTexMapX = (((unsigned short)(l_depthMD.FullXRes() - 1) / 512) + 1) * 512;
        g_nTexMapY = (((unsigned short)(l_imageMD.FullYRes() - 1) / 512) + 1) * 512;
        g_pTexMaps.push_back((XnRGB24Pixel*)malloc(g_nTexMapX * g_nTexMapY * sizeof(XnRGB24Pixel)));

        g_images.push_back(l_image);
        g_depths.push_back(l_depth);
        g_contexts.push_back(l_context);
    }

    // OpenGL init
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH);
    glutInitWindowSize(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
    glutCreateWindow("OpenNI Simple Viewer");
    //glutFullScreen();
    glutSetCursor(GLUT_CURSOR_NONE);

    glutKeyboardFunc(glutKeyboard);
    glutDisplayFunc(glutDisplay);
    glutIdleFunc(glutIdle);

    glDisable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);

    // Per frame code is in glutDisplay
    glutMainLoop();

    return 0;
}
//----------------------------------------
void ofxOpenNI::openCommon()
{
    XnStatus nRetVal = XN_STATUS_OK;

    g_bIsDepthOn = false;
    g_bIsImageOn = false;
    g_bIsIROn = false;
    g_bIsAudioOn = false;
    g_bIsPlayerOn = false;
    g_bIsDepthRawOnOption = false;

    NodeInfoList list;
    nRetVal = g_Context.EnumerateExistingNodes(list);
    if (nRetVal == XN_STATUS_OK)
    {
        for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
        {
            switch ((*it).GetDescription().Type)
            {
            case XN_NODE_TYPE_DEVICE:
                ofLogVerbose(LOG_NAME) << "Creating device";
                (*it).GetInstance(g_Device);
                break;
            case XN_NODE_TYPE_DEPTH:
                ofLogVerbose(LOG_NAME) << "Creating depth generator";
                g_bIsDepthOn = true;
                g_bIsDepthRawOnOption = true;
                (*it).GetInstance(g_Depth);
                break;
            case XN_NODE_TYPE_IMAGE:
                ofLogVerbose(LOG_NAME) << "Creating image generator";
                g_bIsImageOn = true;
                (*it).GetInstance(g_Image);
                break;
            case XN_NODE_TYPE_IR:
                ofLogVerbose(LOG_NAME) << "Creating ir generator";
                g_bIsIROn = true;
                (*it).GetInstance(g_IR);
                break;
            case XN_NODE_TYPE_AUDIO:
                ofLogVerbose(LOG_NAME) << "Creating audio generator";
                g_bIsAudioOn = true;
                (*it).GetInstance(g_Audio);
                break;
            case XN_NODE_TYPE_PLAYER:
                ofLogVerbose(LOG_NAME) << "Creating player";
                g_bIsPlayerOn = true;
                (*it).GetInstance(g_Player);
                break;
            }
        }
    }

    XnCallbackHandle hDummy;
    g_Context.RegisterToErrorStateChange(onErrorStateChanged, this, hDummy);

    initConstants();
    allocateDepthBuffers();
    allocateDepthRawBuffers();
    allocateRGBBuffers();
    readFrame();
}
XnStatus openDeviceFromXmlWithChoice(const char* csXmlFile, EnumerationErrors& errors)
{
    XnStatus nRetVal = XN_STATUS_OK;

    xnLogInitFromXmlFile(csXmlFile);

    nRetVal = g_Context.Init();
    XN_IS_STATUS_OK(nRetVal);

    // find devices
    NodeInfoList list;
    nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, list, &errors);
    XN_IS_STATUS_OK(nRetVal);

    printf("The following devices were found:\n");
    int i = 1;
    for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it, ++i)
    {
        NodeInfo deviceNodeInfo = *it;

        Device deviceNode;
        deviceNodeInfo.GetInstance(deviceNode);
        XnBool bExists = deviceNode.IsValid();
        if (!bExists)
        {
            g_Context.CreateProductionTree(deviceNodeInfo, deviceNode);
            // this might fail.
        }

        if (deviceNode.IsValid() && deviceNode.IsCapabilitySupported(XN_CAPABILITY_DEVICE_IDENTIFICATION))
        {
            const XnUInt32 nStringBufferSize = 200;
            XnChar strDeviceName[nStringBufferSize];
            XnChar strSerialNumber[nStringBufferSize];

            XnUInt32 nLength = nStringBufferSize;
            deviceNode.GetIdentificationCap().GetDeviceName(strDeviceName, nLength);
            nLength = nStringBufferSize;
            deviceNode.GetIdentificationCap().GetSerialNumber(strSerialNumber, nLength);
            printf("[%d] %s (%s)\n", i, strDeviceName, strSerialNumber);
        }
        else
        {
            printf("[%d] %s\n", i, deviceNodeInfo.GetCreationInfo());
        }

        // release the device if we created it
        if (!bExists && deviceNode.IsValid())
        {
            deviceNode.Release();
        }
    }
    printf("\n");
    printf("Choose device to open (1): ");

    int chosen = 1;
    scanf("%d", &chosen);

    // create it
    NodeInfoList::Iterator it = list.Begin();
    for (i = 1; i < chosen; ++i)
    {
        it++;
    }

    NodeInfo deviceNode = *it;
    nRetVal = g_Context.CreateProductionTree(deviceNode, g_Device);
    XN_IS_STATUS_OK(nRetVal);

    // now run the rest of the XML
    nRetVal = g_Context.RunXmlScriptFromFile(csXmlFile, g_scriptNode, &errors);
    XN_IS_STATUS_OK(nRetVal);

    openCommon();

    return (XN_STATUS_OK);
}
// fills a Map with available ProductionNodes. used for altern. viewpoint.
void fillProductionNodeMap(std::map<std::string, xn::ProductionNode> &pn_map)
{
  ProductionNode n;
  XnStatus status = XN_STATUS_OK;
  NodeInfoList l;

  // RGB
  OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_IMAGE, NULL, l);
  int i = 0;
  for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
    std::ostringstream tmp;
    tmp << "rgb";
    if(i) tmp << i;
    NodeInfo ni = *it;
    status = OpenNIContext::CreateProductionTree(ni, n);
    if(status == XN_STATUS_OK){
      pn_map[tmp.str()] = n;
    } else {
      DEBUG_LOG("error while creating Production tree: " << xnGetStatusString(status));
    }
  }

  // DEPTH
  OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, NULL, l);
  i = 0;
  for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
    std::ostringstream tmp;
    tmp << "depth";
    if(i) tmp << i;
    NodeInfo ni = *it;
    status = OpenNIContext::CreateProductionTree(ni, n);
    if(status == XN_STATUS_OK){
      pn_map[tmp.str()] = n;
    } else {
      DEBUG_LOG("error while creating Production tree: " << xnGetStatusString(status));
    }
  }

  // IR
  OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_IR, NULL, l);
  i = 0;
  for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
    std::ostringstream tmp;
    tmp << "ir";
    if(i) tmp << i;
    NodeInfo ni = *it;
    status = OpenNIContext::CreateProductionTree(ni, n);
    if(status == XN_STATUS_OK){
      pn_map[tmp.str()] = n;
    } else {
      DEBUG_LOG("error while creating Production tree: " << xnGetStatusString(status));
    }
  }

  // AUDIO
  OpenNIContext::EnumerateProductionTrees(XN_NODE_TYPE_AUDIO, NULL, l);
  i = 0;
  for (NodeInfoList::Iterator it = l.Begin(); it != l.End(); ++it, ++i){
    std::ostringstream tmp;
    tmp << "audio";
    if(i) tmp << i;
    NodeInfo ni = *it;
    status = OpenNIContext::CreateProductionTree(ni, n);
    if(status == XN_STATUS_OK){
      pn_map[tmp.str()] = n;
    } else {
      DEBUG_LOG("error while creating Production tree: " << xnGetStatusString(status));
    }
  }
}
int main(int argc, char* argv[])
{
    XnStatus nRetVal = XN_STATUS_OK;
    Context context;
    EnumerationErrors errors;
    Mode mode;

    // default mode
#if XN_PLATFORM == XN_PLATFORM_WIN32
    mode = MODE_PLAY;
#else
    mode = MODE_RECORD;
#endif

    // check if mode was provided by user
    if (argc > 1)
    {
        if (strcmp(argv[1], "play") == 0)
        {
            mode = MODE_PLAY;
        }
        else if (strcmp(argv[1], "record") == 0)
        {
            mode = MODE_RECORD;
        }
        else
        {
            printUsage(argv[0]);
            return -1;
        }
    }

    // make sure mode is valid
#if XN_PLATFORM != XN_PLATFORM_WIN32
    if (mode == MODE_PLAY)
    {
        printf("Playing is not supported on this platform!\n");
        return -1;
    }
#endif

    ScriptNode scriptNode;
    nRetVal = context.InitFromXmlFile(SAMPLE_XML_PATH, scriptNode);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        return (nRetVal);
    }

    // find audio nodes
    AudioGenerator gens[nSupportedNodes];
    XnUInt32 nNodes = 0;

    NodeInfoList list;
    nRetVal = context.EnumerateExistingNodes(list, XN_NODE_TYPE_AUDIO);
    CHECK_RC(nRetVal, "Enumerate audio nodes");

    for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it)
    {
        NodeInfo info = *it;
        nRetVal = info.GetInstance(gens[nNodes]);
        CHECK_RC(nRetVal, "Get audio node");
        nNodes++;
    }

    if (nNodes == 0)
    {
        printf("No audio node was found!\n");
        return -1;
    }

    if (mode == MODE_PLAY)
    {
        nRetVal = play(context, gens, nNodes);
    }
    else if (mode == MODE_RECORD)
    {
        nRetVal = record(context, gens, nNodes);
    }

    scriptNode.Release();
    for (int i = 0; i < nSupportedNodes; ++i)
        gens[i].Release();
    context.Release();

    return nRetVal;
}
int main(int argc, char* argv[])
{
    XnStatus nRetVal = XN_STATUS_OK;

    if (argc < 3)
    {
        printUsage(argv[0]);
        return -1;
    }

    const char* strInputFile = argv[1];
    const char* strOutputFile = argv[2];

    // options
    const XnChar* astrNodeNames[MAX_NODES_COUNT];
    XnUInt32 nNodeNames = 0;
    XnProductionNodeType aNodeTypes[MAX_NODES_COUNT];
    XnUInt32 nNodeTypes = 0;
    const XnChar* strPrimaryNode = NULL;
    XnProductionNodeType primaryNodeType = XN_NODE_TYPE_INVALID;
    XnUInt32 nStartFrame = 0;
    XnUInt32 nEndFrame = XN_MAX_UINT32;

    //-----------------------------------------------------------------------
    // Parsing Options
    //-----------------------------------------------------------------------

    for (int i = 3; i < argc; ++i)
    {
        // look for '='
        char* equalSign = strchr(argv[i], '=');
        if (equalSign == NULL)
        {
            printUsage(argv[0]);
            return -1;
        }

        *equalSign = '\0';
        char* option = argv[i];
        char* optionArg = equalSign + 1;

        // now check which option is that
        if (strcmp(option, "--nodes") == 0)
        {
            for (;;)
            {
                char* commaPos = strchr(optionArg, ',');
                if (commaPos != NULL)
                {
                    *commaPos = '\0';
                }

                if (strlen(optionArg) == 0)
                {
                    printUsage(argv[0]);
                    return -1;
                }

                astrNodeNames[nNodeNames++] = optionArg;

                if (commaPos == NULL)
                {
                    break;
                }
                else
                {
                    optionArg = commaPos + 1;
                }
            }
        }
        else if (strcmp(option, "--types") == 0)
        {
            for (;;)
            {
                char* commaPos = strchr(optionArg, ',');
                if (commaPos != NULL)
                {
                    *commaPos = '\0';
                }

                // index with nNodeTypes (the original indexed with nNodeNames, which is
                // always 0 here and would overwrite the same slot for every type)
                nRetVal = xnProductionNodeTypeFromString(optionArg, &aNodeTypes[nNodeTypes]);
                if (nRetVal != XN_STATUS_OK)
                {
                    printf("%s is not a valid node type!\n", optionArg);
                    return -1;
                }

                ++nNodeTypes;

                if (commaPos == NULL)
                {
                    break;
                }
                else
                {
                    optionArg = commaPos + 1;
                }
            }
        }
        else if (strcmp(option, "--primary-node") == 0)
        {
            strPrimaryNode = optionArg;
        }
        else if (strcmp(option, "--primary-node-type") == 0)
        {
            nRetVal = xnProductionNodeTypeFromString(optionArg, &primaryNodeType);
            if (nRetVal != XN_STATUS_OK)
            {
                printf("%s is not a valid node type!\n", optionArg);
                return -1;
            }
        }
        else if (strcmp(option, "--start-frame") == 0)
        {
            nStartFrame = atoi(optionArg);
        }
        else if (strcmp(option, "--end-frame") == 0)
        {
            nEndFrame = atoi(optionArg);
        }
        else
        {
            printUsage(argv[0]);
            return -1;
        }
    }

    // validate options
    if (nNodeNames > 0 && nNodeTypes > 0)
    {
        printf("Cannot use --nodes and --types together.\n");
        return -1;
    }

    if (primaryNodeType != XN_NODE_TYPE_INVALID && strPrimaryNode != NULL)
    {
        printf("Cannot use --primary-node and --primary-node-type together.\n");
        return -1;
    }

    // start and end requires primary node
    if ((nStartFrame != 0 || nEndFrame != XN_MAX_UINT32) &&
        (strPrimaryNode == NULL && primaryNodeType == XN_NODE_TYPE_INVALID))
    {
        printf("A primary node must be defined for using --start-frame or --end-frame.\n");
        return -1;
    }

    //-----------------------------------------------------------------------
    // Execute
    //-----------------------------------------------------------------------

    Context context;
    nRetVal = context.Init();
    CHECK_RC(nRetVal, "Init");

    // open input file
    Player player;
    nRetVal = context.OpenFileRecording(strInputFile, player);
    CHECK_RC(nRetVal, "Open input file");

    // play as fast as you can
    nRetVal = player.SetPlaybackSpeed(XN_PLAYBACK_SPEED_FASTEST);
    CHECK_RC(nRetVal, "Setting playback speed");

    // don't rewind recording
    nRetVal = player.SetRepeat(FALSE);
    XN_IS_STATUS_OK(nRetVal);

    // get the list of all created nodes
    NodeInfoList nodes;
    nRetVal = player.EnumerateNodes(nodes);
    CHECK_RC(nRetVal, "Enumerate nodes");

    // first of all, find primary node
    ProductionNode primaryNode;
    if (primaryNodeType != XN_NODE_TYPE_INVALID)
    {
        nRetVal = context.FindExistingNode(primaryNodeType, primaryNode);
        if (nRetVal != XN_STATUS_OK)
        {
            printf("Input file does not contain any node of type %s\n", xnProductionNodeTypeToString(primaryNodeType));
            return -1;
        }
    }
    else if (strPrimaryNode != NULL)
    {
        nRetVal = context.GetProductionNodeByName(strPrimaryNode, primaryNode);
        if (nRetVal != XN_STATUS_OK)
        {
            printf("Input file does not contain any node named %s\n", strPrimaryNode);
            return -1;
        }
    }

    XnUInt32 nTotalFrames = 0;

    // first seek to end frame (to calculate total amount of work)
    if (nEndFrame != XN_MAX_UINT32)
    {
        nRetVal = player.SeekToFrame(primaryNode.GetName(), nEndFrame, XN_PLAYER_SEEK_SET);
        CHECK_RC(nRetVal, "Seeking to end frame");

        for (NodeInfoList::Iterator it = nodes.Begin(); it != nodes.End(); ++it)
        {
            NodeInfo nodeInfo = *it;
            XnUInt32 nNodeFrames = 0;
            nRetVal = player.TellFrame(nodeInfo.GetInstanceName(), nNodeFrames);
            CHECK_RC(nRetVal, "Tell frame");
            nTotalFrames += nNodeFrames;
        }
    }
    else
    {
        for (NodeInfoList::Iterator it = nodes.Begin(); it != nodes.End(); ++it)
        {
            NodeInfo nodeInfo = *it;
            XnUInt32 nNodeFrames = 0;
            nRetVal = player.GetNumFrames(nodeInfo.GetInstanceName(), nNodeFrames);
            CHECK_RC(nRetVal, "Get number of frames");
            nTotalFrames += nNodeFrames;
        }
    }

    // seek to start frame
    if (nStartFrame > 0)
    {
        nRetVal = player.SeekToFrame(primaryNode.GetName(), nStartFrame, XN_PLAYER_SEEK_SET);
        CHECK_RC(nRetVal, "Seeking to start frame");

        // remove skipped frames from total
        for (NodeInfoList::Iterator it = nodes.Begin(); it != nodes.End(); ++it)
        {
            NodeInfo nodeInfo = *it;
            XnUInt32 nNodeFrames = 0;
            nRetVal = player.TellFrame(nodeInfo.GetInstanceName(), nNodeFrames);
            CHECK_RC(nRetVal, "Tell frame");
            nTotalFrames -= nNodeFrames;
        }
    }

    // create recorder
    Recorder recorder;
    nRetVal = recorder.Create(context);
    CHECK_RC(nRetVal, "Create recorder");

    nRetVal = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, strOutputFile);
    CHECK_RC(nRetVal, "Set recorder destination file");

    // add nodes to recorder
    if (nNodeNames > 0)
    {
        for (XnUInt32 i = 0; i < nNodeNames; ++i)
        {
            ProductionNode node;
            nRetVal = context.GetProductionNodeByName(astrNodeNames[i], node);
            if (nRetVal != XN_STATUS_OK)
            {
                printf("Input file does not contain any node named %s\n", astrNodeNames[i]);
                return -1;
            }

            nRetVal = recorder.AddNodeToRecording(node);
            CHECK_RC(nRetVal, "Add to recording");
        }
    }
    else if (nNodeTypes > 0)
    {
        XnBool bAnyNodeAdded = FALSE;
        for (NodeInfoList::Iterator it = nodes.Begin(); it != nodes.End(); ++it)
        {
            NodeInfo nodeInfo = *it;
            const XnProductionNodeDescription& description = nodeInfo.GetDescription();

            for (XnUInt32 i = 0; i < nNodeTypes; ++i)
            {
                if (description.Type == aNodeTypes[i])
                {
                    ProductionNode node;
                    nRetVal = nodeInfo.GetInstance(node);
                    CHECK_RC(nRetVal, "Get Instance");

                    nRetVal = recorder.AddNodeToRecording(node);
                    CHECK_RC(nRetVal, "Add to recording");

                    bAnyNodeAdded = TRUE;
                    break;
                }
            }
        }

        if (!bAnyNodeAdded)
        {
            printf("No node was found in input which matches requested types.\n");
            return -1;
        }
    }
    else
    {
        // add all nodes
        for (NodeInfoList::Iterator it = nodes.Begin(); it != nodes.End(); ++it)
        {
            NodeInfo nodeInfo = *it;
            ProductionNode node;
            nRetVal = nodeInfo.GetInstance(node);
            CHECK_RC(nRetVal, "Get Instance");

            nRetVal = recorder.AddNodeToRecording(node);
            CHECK_RC(nRetVal, "Add to recording");
        }
    }

    XnUInt32 nFrame = 0;
    XnDouble fPercentageFraction = 100.0 / nTotalFrames;

    while ((nRetVal = context.WaitAnyUpdateAll()) != XN_STATUS_EOF)
    {
        CHECK_RC(nRetVal, "Read next frame");
        printf("Recording: %.1f%%\r", nFrame * fPercentageFraction);

        if (primaryNode.IsValid())
        {
            XnUInt32 nCurrentFrame;
            nRetVal = player.TellFrame(primaryNode.GetName(), nCurrentFrame);
            CHECK_RC(nRetVal, "Tell frame");

            if (nCurrentFrame == nEndFrame)
            {
                break;
            }
        }

        ++nFrame;
    }

    recorder.Release();
    player.Release();
    context.Release();

    return 0;
}
int main(int argc, char* argv[])
{
    XnStatus nRetVal = XN_STATUS_OK;

    if (argc < 3)
    {
        printf("usage: %s <inputFile> <outputFile> [nodeType] [startFrame] [endFrame]\n", argv[0]);
        return -1;
    }

    const char* strInputFile = argv[1];
    const char* strOutputFile = argv[2];
    const char* strNodeType = NULL;
    XnUInt32 nStartFrame = 1;
    XnUInt32 nEndFrame = XN_MAX_UINT32;
    XnProductionNodeType seekNodeType = XN_NODE_TYPE_INVALID;

    if (argc >= 4)
    {
        strNodeType = argv[3];
        nRetVal = xnProductionNodeTypeFromString(strNodeType, &seekNodeType);
        if (nRetVal != XN_STATUS_OK)
        {
            printf("Bad node type specified: %s\n", strNodeType);
            return nRetVal;
        }

        if (argc >= 5)
        {
            nStartFrame = atoi(argv[4]);
            if (argc >= 6)
            {
                nEndFrame = atoi(argv[5]);
            }
        }
    }

    Context context;
    nRetVal = context.Init();
    CHECK_RC(nRetVal, "Init");

    // open input file
    Player player;
    nRetVal = context.OpenFileRecording(strInputFile, player);
    CHECK_RC(nRetVal, "Open input file");

    nRetVal = player.SetPlaybackSpeed(XN_PLAYBACK_SPEED_FASTEST);
    CHECK_RC(nRetVal, "Setting playback speed");

    // get the list of all created nodes
    NodeInfoList nodes;
    nRetVal = player.EnumerateNodes(nodes);
    CHECK_RC(nRetVal, "Enumerate nodes");

    // create recorder
    Recorder recorder;
    nRetVal = recorder.Create(context);
    CHECK_RC(nRetVal, "Create recorder");

    nRetVal = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, strOutputFile);
    CHECK_RC(nRetVal, "Set recorder destination file");

    ProductionNode seekNode;

    // add all nodes to recorder
    for (NodeInfoList::Iterator it = nodes.Begin(); it != nodes.End(); ++it)
    {
        NodeInfo nodeInfo = *it;

        // NOTE: for now, ignore audio
        if (nodeInfo.GetDescription().Type == XN_NODE_TYPE_AUDIO)
        {
            continue;
        }

        ProductionNode node;
        nRetVal = nodeInfo.GetInstance(node);
        CHECK_RC(nRetVal, "Get instance");

        if (seekNodeType == XN_NODE_TYPE_INVALID)
        {
            // No node type specified - record all nodes.
            nRetVal = recorder.AddNodeToRecording(node);
            CHECK_RC(nRetVal, "Add node to recording");
        }
        else if (seekNodeType == nodeInfo.GetDescription().Type)
        {
            // If node type is specified, we only record nodes of that type.
            nRetVal = player.SeekToFrame(node.GetName(), nStartFrame, XN_PLAYER_SEEK_SET);
            CHECK_RC(nRetVal, "Seek player to frame");

            nRetVal = recorder.AddNodeToRecording(node);
            CHECK_RC(nRetVal, "Add node to recording");
        }
    }

    nRetVal = player.SetRepeat(FALSE);
    XN_IS_STATUS_OK(nRetVal);

    int nFrame = 0;
    while ((nRetVal = context.WaitAnyUpdateAll()) != XN_STATUS_EOF)
    {
        CHECK_RC(nRetVal, "Read next frame");
        printf("Recording: %u\r", nFrame++);

        if ((seekNodeType != XN_NODE_TYPE_INVALID) && (nFrame == nEndFrame))
        {
            break;
        }
    }

    player.Release();
    context.Release();

    return 0;
}