// Toggle depth-to-image registration (alternative viewpoint) on or off.
void changeRegistration(int nValue)
{
    if (!g_Depth.IsValid() || !g_Depth.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT))
    {
        return;
    }

    if (!nValue)
    {
        g_Depth.GetAlternativeViewPointCap().ResetViewPoint();
    }
    else if (g_Image.IsValid())
    {
        g_Depth.GetAlternativeViewPointCap().SetViewPoint(g_Image);
    }
}
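// Hypothetical usage sketch (not part of the original sample): wiring changeRegistration()
// to a GLUT keyboard handler so the 'r' key toggles registration at runtime. The
// g_registrationOn flag and the onKeyboard() handler are assumptions for illustration;
// only glutKeyboardFunc() and the function above come from known APIs.
static bool g_registrationOn = false;

void onKeyboard(unsigned char key, int /*x*/, int /*y*/)
{
    if (key == 'r')
    {
        g_registrationOn = !g_registrationOn;
        changeRegistration(g_registrationOn ? 1 : 0);
    }
}

// Registered once during setup: glutKeyboardFunc(onKeyboard);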
// Map a depth-map pixel (x, y) to the corresponding colour-image pixel.
// Returns false if either generator is missing or the capability is unsupported.
bool getImageCoordinatesForDepthPixel(int x, int y, int& imageX, int& imageY)
{
    if (!g_Depth.IsValid())
        return false; // no depth generator
    if (!g_Image.IsValid())
        return false; // no image generator
    if (!g_Depth.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT))
        return false;

    XnUInt32 altX;
    XnUInt32 altY;
    if (XN_STATUS_OK != g_Depth.GetAlternativeViewPointCap().GetPixelCoordinatesInViewPoint(g_Image, x, y, altX, altY))
        return false;

    imageX = (int)altX;
    imageY = (int)altY;
    return true;
}
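// Hypothetical usage sketch: fetch the RGB value that corresponds to a depth pixel.
// getColorForDepthPixel() is an illustrative helper, not from the original source;
// it only relies on the function above plus the standard xn::ImageMetaData API
// (GetMetaData, XRes/YRes, RGB24Data) and assumes both maps were updated this frame.
bool getColorForDepthPixel(int x, int y, XnRGB24Pixel& outColor)
{
    int imageX = 0;
    int imageY = 0;
    if (!getImageCoordinatesForDepthPixel(x, y, imageX, imageY))
        return false;

    xn::ImageMetaData imageMD;
    g_Image.GetMetaData(imageMD);
    if (imageX < 0 || imageY < 0 ||
        imageX >= (int)imageMD.XRes() || imageY >= (int)imageMD.YRes())
        return false; // mapped coordinate fell outside the colour frame

    const XnRGB24Pixel* pImage = imageMD.RGB24Data();
    outColor = pImage[imageY * imageMD.XRes() + imageX];
    return true;
}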
int Init()
{
    XnStatus rc;

    // Make sure our image types are the same size as the OpenNI image types.
    assert(sizeof(XnRGB24Pixel) == sizeof(ColorPixel));
    assert(sizeof(XnDepthPixel) == sizeof(DepthPixel));
    assert(sizeof(XnStatus) == sizeof(int));

    // Load the OpenNI XML settings file from the executable's directory.
    char filePath[255];
    int length = Util::Helpers::GetExeDirectory(filePath, sizeof(filePath));
    filePath[length] = '\\';
    strcpy(&filePath[length + 1], SAMPLE_XML_PATH);

    EnumerationErrors errors;
    rc = deviceContext.InitFromXmlFile(filePath, &errors);
    if (rc == XN_STATUS_NO_NODE_PRESENT)
    {
        // One reason for this is the Microsoft SDK being installed beside PrimeSense.
        // Device Manager should say PrimeSense instead of Microsoft Kinect.
        //XnChar strError[1024];
        //errors.ToString(strError, 1024);
        //LOGE("%s\n", strError);
        return -1;
    }
    else if (rc != XN_STATUS_OK)
    {
        fprintf(stderr, "%s\n", xnGetStatusString(rc));
        /*LOGE("Open failed: %s\n", xnGetStatusString(rc));*/
        return (rc);
    }

    // Retrieve the colour and depth nodes created by the XML script.
    rc = deviceContext.FindExistingNode(XN_NODE_TYPE_IMAGE, colorImageGenerator);
    rc = deviceContext.FindExistingNode(XN_NODE_TYPE_DEPTH, depthImageGenerator);

    // Set mirror mode to off.
    SetMirrorMode(false);

    // Get a frame to perform checks on it.
    ImageMetaData colorImageMetaData;
    DepthMetaData depthImageMetaData;
    depthImageGenerator.GetMetaData(depthImageMetaData);
    colorImageGenerator.GetMetaData(colorImageMetaData);

    // Hybrid mode isn't supported in this sample.
    if (colorImageMetaData.FullXRes() != depthImageMetaData.FullXRes() ||
        colorImageMetaData.FullYRes() != depthImageMetaData.FullYRes())
    {
        /*LOGE("The device depth and image resolution must be equal!\n");*/
        return 1;
    }

    // RGB24 is the only image format supported.
    if (colorImageMetaData.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
    {
        /*LOGE("The device image format must be RGB24\n");*/
        return 1;
    }

    // Make sure automatic alignment of the colour and depth images is supported.
    XnBool isSupported = depthImageGenerator.IsCapabilitySupported("AlternativeViewPoint");
    if (!isSupported)
    {
        /*LOGE("Cannot set AlternativeViewPoint!\n");*/
        return 1;
    }

    // Set it to VGA maps at 30 FPS
    /*XnMapOutputMode mapMode;
    mapMode.nXRes = XN_VGA_X_RES;
    mapMode.nYRes = XN_VGA_Y_RES;
    mapMode.nFPS = 60;
    rc = g_depth.SetMapOutputMode(mapMode);
    if (rc)
    {
        LOGE("Failed to set depth map mode: %s\n", xnGetStatusString(rc));
        return 1;
    }
    mapMode.nFPS = 30;
    rc = g_image.SetMapOutputMode(mapMode);
    if (rc)
    {
        LOGE("Failed to set image map mode: %s\n", xnGetStatusString(rc));
        return 1;
    }*/

    // Set automatic alignment of the colour and depth images.
    rc = depthImageGenerator.GetAlternativeViewPointCap().SetViewPoint(colorImageGenerator);
    if (rc != XN_STATUS_OK)
    {
        /*LOGE("Failed to set alternative viewpoint: %s\n", xnGetStatusString(rc));*/
        return 1;
    }

    return XN_STATUS_OK;
}
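// Hypothetical companion sketch: a per-frame update that would typically follow a
// successful Init(). Update() is an illustrative helper, not from the original source;
// it assumes the same deviceContext / colorImageGenerator / depthImageGenerator globals
// used above and only calls standard xn::Context and metadata accessors.
int Update()
{
    // Block until new depth and colour frames are available.
    XnStatus rc = deviceContext.WaitAndUpdateAll();
    if (rc != XN_STATUS_OK)
    {
        fprintf(stderr, "WaitAndUpdateAll failed: %s\n", xnGetStatusString(rc));
        return rc;
    }

    DepthMetaData depthMD;
    ImageMetaData colorMD;
    depthImageGenerator.GetMetaData(depthMD);
    colorImageGenerator.GetMetaData(colorMD);

    // With the alternative viewpoint set in Init(), the depth pixel at (x, y) and the
    // colour pixel at the same (x, y) refer to the same point in the scene.
    const XnDepthPixel* pDepth = depthMD.Data();
    const XnRGB24Pixel* pColor = colorMD.RGB24Data();
    (void)pDepth; (void)pColor; // hand these buffers off to the application here

    return XN_STATUS_OK;
}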
void OpenNIRecorder::start( uint64_t nodeTypeFlags, std::string& filename )
{
    XnStatus result;

    // Create the recorder and point it at the output file.
    result = mRecorder.Create( *mDevice->getContext() ); //->CreateAnyProductionTree( XN_NODE_TYPE_RECORDER, NULL, mRecorder, &mErrors );
    CHECK_RC( result, "[Recorder] Create recorder" );
    //CHECK_RC_ERR( result, "Create recorder", mErrors );

    result = mRecorder.SetDestination( XN_RECORD_MEDIUM_FILE, filename.c_str() );
    CHECK_RC( result, "[Recorder] Set destination" );

    // Add the requested generator nodes to the recording.
    ImageGenerator* image = NULL;
    IRGenerator* ir = NULL;
    DepthGenerator* depth = NULL;
    if( nodeTypeFlags & NODE_TYPE_IMAGE )
    {
        image = mDevice->getImageGenerator();
        mRecorder.AddNodeToRecording( *image ); //, XN_CODEC_JPEG );
    }
    if( nodeTypeFlags & NODE_TYPE_IR )
    {
        ir = mDevice->getIRGenerator();
        mRecorder.AddNodeToRecording( *ir ); //, XN_CODEC_JPEG );
    }
    if( nodeTypeFlags & NODE_TYPE_DEPTH )
    {
        depth = mDevice->getDepthGenerator();
        mRecorder.AddNodeToRecording( *depth ); //, XN_CODEC_16Z_EMB_TABLES );
    }
    if( nodeTypeFlags & NODE_TYPE_USER )
    {
    }

    // Check for frame synchronization. Only attempt it when a depth generator was
    // actually requested, and pair it with whichever other node was requested.
    if( depth != NULL && depth->IsCapabilitySupported( XN_CAPABILITY_FRAME_SYNC ) )
    {
        if( ( nodeTypeFlags & NODE_TYPE_DEPTH ) && ( nodeTypeFlags & NODE_TYPE_IMAGE ) )
        {
            if( depth->GetFrameSyncCap().CanFrameSyncWith( *image ) )
            {
                result = depth->GetFrameSyncCap().FrameSyncWith( *image );
                CHECK_RC( result, "Enable frame sync" );
            }
        }
        else if( ( nodeTypeFlags & NODE_TYPE_DEPTH ) && ( nodeTypeFlags & NODE_TYPE_IR ) )
        {
            if( depth->GetFrameSyncCap().CanFrameSyncWith( *ir ) )
            {
                result = depth->GetFrameSyncCap().FrameSyncWith( *ir );
                CHECK_RC( result, "Enable frame sync" );
            }
        }
    }

    // If everything is OK, mark the recorder as running.
    mIsRecording = true;
    mIsPaused = false;
}
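// Hypothetical counterpart sketch for stopping the recording. OpenNIRecorder::stop() is
// not shown in the original source, so the method and the member names below simply
// mirror those used in start() and remain assumptions; xn::Recorder inherits Release()
// from ProductionNode, which drops the recorder node and stops writing to the file.
void OpenNIRecorder::stop()
{
    if( !mIsRecording )
        return;

    // Releasing the recorder node finishes the recording and closes the output file.
    mRecorder.Release();

    mIsRecording = false;
    mIsPaused = false;
}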