XnStatus MapWatcher::NotifySupportedOutputModes()
{
    XnUInt32 nOutputModes = m_mapGenerator.GetSupportedMapOutputModesCount();
    if (nOutputModes == 0)
    {
        return XN_STATUS_ERROR;
    }

    XnStatus nRetVal = NotifyIntPropChanged(XN_PROP_SUPPORTED_MAP_OUTPUT_MODES_COUNT, nOutputModes);
    XN_IS_STATUS_OK(nRetVal);

    XnMapOutputMode* pOutputModes = XN_NEW_ARR(XnMapOutputMode, nOutputModes);
    XN_VALIDATE_ALLOC_PTR(pOutputModes);

    nRetVal = m_mapGenerator.GetSupportedMapOutputModes(pOutputModes, nOutputModes);
    if (nRetVal != XN_STATUS_OK)
    {
        XN_DELETE_ARR(pOutputModes);
        return nRetVal;
    }

    nRetVal = NotifyGeneralPropChanged(XN_PROP_SUPPORTED_MAP_OUTPUT_MODES, sizeof(pOutputModes[0]) * nOutputModes, pOutputModes);
    if (nRetVal != XN_STATUS_OK)
    {
        XN_DELETE_ARR(pOutputModes);
        return nRetVal;
    }

    XN_DELETE_ARR(pOutputModes);
    return XN_STATUS_OK;
}
XnStatus AudioWatcher::NotifySupportedOutputModes()
{
    XnUInt32 nModes = m_audioGenerator.GetSupportedWaveOutputModesCount();
    if (nModes == 0)
    {
        return XN_STATUS_ERROR;
    }

    XnStatus nRetVal = NotifyIntPropChanged(XN_PROP_WAVE_SUPPORTED_OUTPUT_MODES_COUNT, nModes);
    XN_IS_STATUS_OK(nRetVal);

    XnWaveOutputMode* pSupportedModes = XN_NEW_ARR(XnWaveOutputMode, nModes);
    XN_VALIDATE_ALLOC_PTR(pSupportedModes);

    nRetVal = m_audioGenerator.GetSupportedWaveOutputModes(pSupportedModes, nModes);
    if (nRetVal != XN_STATUS_OK)
    {
        XN_DELETE_ARR(pSupportedModes);
        return nRetVal;
    }

    nRetVal = NotifyGeneralPropChanged(XN_PROP_WAVE_SUPPORTED_OUTPUT_MODES, nModes * sizeof(XnWaveOutputMode), pSupportedModes);
    if (nRetVal != XN_STATUS_OK)
    {
        XN_DELETE_ARR(pSupportedModes);
        return nRetVal;
    }

    XN_DELETE_ARR(pSupportedModes);
    return XN_STATUS_OK;
}
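// The two notifiers above (and DepthWatcher::NotifyUserPositions below) repeat
// XN_DELETE_ARR on every exit path. A scope guard would collapse those paths to
// one; the following is an illustrative sketch only, not a helper that exists
// in OpenNI (written C++03-style to match the codebase's vintage):
template <typename T>
class ScopedArr
{
public:
    explicit ScopedArr(T* p) : m_p(p) {}
    ~ScopedArr() { XN_DELETE_ARR(m_p); } // single cleanup point for all returns
    T* Get() const { return m_p; }
private:
    ScopedArr(const ScopedArr&);            // non-copyable
    ScopedArr& operator=(const ScopedArr&);
    T* m_p;
};
// With 'ScopedArr<XnMapOutputMode> guard(pOutputModes);' declared right after
// the allocation, each failure branch above could simply 'return nRetVal;'.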
XnStatus PlayerNode::Destroy()
{
    CloseStream(); // Don't verify return value - proceed anyway

    if (m_pNodeInfoMap != NULL)
    {
        for (XnUInt32 i = 0; i < m_nMaxNodes; i++)
        {
            RemovePlayerNodeInfo(i);
        }
        XN_DELETE_ARR(m_pNodeInfoMap);
        m_pNodeInfoMap = NULL;
    }

    if (m_aSeekTempArray != NULL)
    {
        xnOSFree(m_aSeekTempArray);
        m_aSeekTempArray = NULL;
    }

    XN_DELETE_ARR(m_pRecordBuffer);
    m_pRecordBuffer = NULL;
    XN_DELETE_ARR(m_pUncompressedData);
    m_pUncompressedData = NULL;

    return XN_STATUS_OK;
}
XnStatus DepthWatcher::NotifyUserPositions()
{
    XnUInt32 nUserPositions = m_depthGenerator.GetUserPositionCap().GetSupportedUserPositionsCount();
    if (nUserPositions == 0)
    {
        return XN_STATUS_NOT_IMPLEMENTED;
    }

    XnStatus nRetVal = NotifyIntPropChanged(XN_PROP_SUPPORTED_USER_POSITIONS_COUNT, nUserPositions);
    XN_IS_STATUS_OK(nRetVal);

    XnBoundingBox3D* pPositions = XN_NEW_ARR(XnBoundingBox3D, nUserPositions);
    XN_VALIDATE_ALLOC_PTR(pPositions);

    XnUInt32 i;
    for (i = 0; i < nUserPositions; i++)
    {
        nRetVal = m_depthGenerator.GetUserPositionCap().GetUserPosition(i, pPositions[i]);
        if (nRetVal != XN_STATUS_OK)
        {
            XN_DELETE_ARR(pPositions);
            return nRetVal;
        }
    }

    nRetVal = NotifyGeneralPropChanged(XN_PROP_USER_POSITIONS, sizeof(pPositions[0]) * nUserPositions, pPositions);
    if (nRetVal != XN_STATUS_OK)
    {
        XN_DELETE_ARR(pPositions);
        return nRetVal;
    }

    XN_DELETE_ARR(pPositions);
    return XN_STATUS_OK;
}
XnStatus PlayerNode::OpenStream()
{
    XN_VALIDATE_INPUT_PTR(m_pInputStream);

    XnStatus nRetVal = m_pInputStream->Open(m_pStreamCookie);
    XN_IS_STATUS_OK(nRetVal);

    RecordingHeader header;
    XnUInt32 nBytesRead = 0;
    nRetVal = m_pInputStream->Read(m_pStreamCookie, &header, sizeof(header), &nBytesRead);
    XN_IS_STATUS_OK(nRetVal);
    if (nBytesRead < sizeof(header))
    {
        XN_LOG_ERROR_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Not enough bytes read");
    }

    /* Check header */
    if (xnOSMemCmp(header.headerMagic, DEFAULT_RECORDING_HEADER.headerMagic, sizeof(header.headerMagic)) != 0)
    {
        XN_LOG_ERROR_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Invalid header magic");
    }

    if ((xnVersionCompare(&header.version, &OLDEST_SUPPORTED_FILE_FORMAT_VERSION) < 0) || // File format is too old
        (xnVersionCompare(&header.version, &DEFAULT_RECORDING_HEADER.version) > 0))       // File format is too new
    {
        XN_LOG_ERROR_RETURN(XN_STATUS_UNSUPPORTED_VERSION, XN_MASK_OPEN_NI,
            "Unsupported file format version: %u.%u.%u.%u",
            header.version.nMajor, header.version.nMinor, header.version.nMaintenance, header.version.nBuild);
    }

    m_nGlobalMaxTimeStamp = header.nGlobalMaxTimeStamp;
    m_nMaxNodes = header.nMaxNodeID + 1;
    XN_ASSERT(m_nMaxNodes > 0);

    XN_DELETE_ARR(m_pNodeInfoMap);
    xnOSFree(m_aSeekTempArray);
    m_pNodeInfoMap = XN_NEW_ARR(PlayerNodeInfo, m_nMaxNodes);
    XN_VALIDATE_ALLOC_PTR(m_pNodeInfoMap);
    XN_VALIDATE_CALLOC(m_aSeekTempArray, DataIndexEntry*, m_nMaxNodes);

    m_bOpen = TRUE;
    nRetVal = ProcessUntilFirstData();
    if (nRetVal != XN_STATUS_OK)
    {
        XN_DELETE_ARR(m_pNodeInfoMap);
        m_pNodeInfoMap = NULL;
        xnOSFree(m_aSeekTempArray);
        m_aSeekTempArray = NULL;
        return nRetVal;
    }

    return XN_STATUS_OK;
}
PoseUserSelector::~PoseUserSelector()
{
    m_pTrackingInitializer = NULL;

    if (m_pUserGenerator != NULL)
    {
        if (m_pUserGenerator->IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            for (UserStateHash::Iterator iter = m_hUsersState.begin(); iter != m_hUsersState.end(); ++iter)
            {
                m_pUserGenerator->GetPoseDetectionCap().StopPoseDetection(iter.Key());
            }
            if (m_hPoseDetectCallback != NULL)
            {
                m_pUserGenerator->GetPoseDetectionCap().UnregisterFromPoseDetected(m_hPoseDetectCallback);
                m_hPoseDetectCallback = NULL;
            }
            if (m_hPoseInProgressCallback != NULL)
            {
                m_pUserGenerator->GetPoseDetectionCap().UnregisterFromPoseInProgress(m_hPoseInProgressCallback);
                m_hPoseInProgressCallback = NULL;
            }
        }
        m_pUserGenerator = NULL;
    }

    if (m_strPoseToTrack != NULL)
    {
        XN_DELETE_ARR(m_strPoseToTrack);
        m_strPoseToTrack = NULL;
    }
}
// Stream
VideoStream::~VideoStream()
{
    // Make sure stream is stopped.
    stop();

    m_device.clearStream(this);

    // Detach all recorders from this stream.
    xnl::LockGuard<Recorders> guard(m_recorders);
    while (m_recorders.Begin() != m_recorders.End())
    {
        // NOTE: DetachStream has a side effect of modifying m_recorders.
        m_recorders.Begin()->Value()->detachStream(*this);
    }

    // Try to close the thread properly, and forcibly terminate it if that failed or timed out.
    m_running = false;
    xnOSSetEvent(m_newFrameInternalEvent);
    xnOSSetEvent(m_newFrameInternalEventForFrameHolder);
    XnStatus rc = xnOSWaitForThreadExit(m_newFrameThread, STREAM_DESTROY_THREAD_TIMEOUT);
    if (rc != XN_STATUS_OK)
    {
        xnOSTerminateThread(&m_newFrameThread);
    }

    m_pFrameHolder->setStreamEnabled(this, FALSE);
    m_driverHandler.deviceDestroyStream(m_device.getHandle(), m_streamHandle);
    xnOSCloseEvent(&m_newFrameInternalEvent);
    xnOSCloseEvent(&m_newFrameInternalEventForFrameHolder);

    XN_DELETE_ARR(m_pSensorInfo->pSupportedVideoModes);
    XN_DELETE(m_pSensorInfo);
}
RecordAssembler::~RecordAssembler()
{
    if (NULL != m_pBuffer)
    {
        XN_DELETE_ARR(m_pBuffer);
    }
}
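// The NULL guard above (and in LinkOniMapStream/ClosestUserSelector below) is
// defensive only: XN_DELETE_ARR in OpenNI's XnOS headers is, as far as I can
// tell, a thin wrapper over delete[] (assumed definition shown here, not copied
// verbatim), and delete[] on a null pointer is a well-defined no-op.
#define XN_DELETE_ARR(arr) delete[] (arr)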
void Recorder::onRecord(XnUInt32 nodeId, XnCodecBase* pCodec, const OniFrame* pFrame, XnUInt32 frameId, XnUInt64 timestamp)
{
    if (0 == nodeId || NULL == pFrame)
    {
        return;
    }

    FIND_ATTACHED_STREAM_INFO(nodeId)
    if (!pInfo) return;

    Memento undoPoint(this);
    if (NULL != pCodec)
    {
        // Worst case the codec inflates the data; allocate twice the raw size
        // plus the codec's fixed overhead so Compress() always has room.
        XnUInt32 bufferSize_bytes32 = pFrame->dataSize * 2 + pCodec->GetOverheadSize();
        XnUInt8* buffer = XN_NEW_ARR(XnUInt8, bufferSize_bytes32);
        XnStatus status = pCodec->Compress(reinterpret_cast<const XnUChar*>(pFrame->data),
            pFrame->dataSize, buffer, &bufferSize_bytes32);
        XnSizeT bufferSize_bytes = bufferSize_bytes32;
        if (XN_STATUS_OK == status)
        {
            EMIT(RECORD_NEW_DATA(
                nodeId,
                pInfo->lastNewDataRecordPosition,
                timestamp,
                frameId,
                buffer,
                bufferSize_bytes))
        }
        XN_DELETE_ARR(buffer);
    }
    // ... (snippet truncated)
LinkOniMapStream::~LinkOniMapStream()
{
    if (m_aSupportedModes != NULL)
    {
        XN_DELETE_ARR(m_aSupportedModes);
        m_aSupportedModes = NULL;
    }
}
ClosestUserSelector::~ClosestUserSelector()
{
    m_pTrackingInitializer = NULL;
    if (m_pUsersList != NULL)
    {
        XN_DELETE_ARR(m_pUsersList);
        m_pUsersList = NULL;
    }
}
XnStatus XnServerSensorInvoker::GetStreamMaxResolution(SensorInvokerStream* pStream, XnUInt32& nMaxNumPixels)
{
    XnStatus nRetVal = XN_STATUS_OK;

    XnUInt64 nCount = 0;
    nRetVal = m_sensor.GetProperty(pStream->strType, XN_STREAM_PROPERTY_SUPPORT_MODES_COUNT, &nCount);
    XN_IS_STATUS_OK(nRetVal);

    XnCmosPreset* aPresets = XN_NEW_ARR(XnCmosPreset, nCount);
    XN_VALIDATE_ALLOC_PTR(aPresets); // guard the allocation, as the other allocation sites here do

    nRetVal = m_sensor.GetProperty(pStream->strType, XN_STREAM_PROPERTY_SUPPORT_MODES, XnGeneralBufferPack(aPresets, nCount * sizeof(XnCmosPreset)));
    if (nRetVal != XN_STATUS_OK)
    {
        XN_DELETE_ARR(aPresets);
        return nRetVal;
    }

    XnUInt32 nMaxPixels = 0;
    for (XnUInt32 i = 0; i < nCount; ++i)
    {
        XnUInt32 nXRes;
        XnUInt32 nYRes;
        if (!XnDDKGetXYFromResolution((XnResolutions)aPresets[i].nResolution, &nXRes, &nYRes))
        {
            continue;
        }

        if (nXRes * nYRes > nMaxPixels)
        {
            nMaxPixels = nXRes * nYRes;
        }
    }

    XN_ASSERT(nMaxPixels > 0);
    XN_DELETE_ARR(aPresets);

    nMaxNumPixels = nMaxPixels;
    return (XN_STATUS_OK);
}
XnNodeManager::~XnNodeManager()
{
    while (m_pAllNodes->begin() != m_pAllNodes->end())
    {
        XnValue RemovedValue;
        m_pAllNodes->Remove(m_pAllNodes->rbegin(), RemovedValue);
        XnNode* pNodes = (XnNode*)RemovedValue;
        XN_DELETE_ARR(pNodes);
    }

    XN_DELETE(m_pAllNodes);
    xnOSCloseCriticalSection(&m_hCriticalSection);
}
XnStatus MockDepthGenerator::SetGeneralProperty(const XnChar* strName, XnUInt32 nBufferSize, const void* pBuffer)
{
    XN_VALIDATE_INPUT_PTR(strName);
    XN_VALIDATE_INPUT_PTR(pBuffer);

    XnStatus nRetVal = XN_STATUS_OK;
    if (strcmp(strName, XN_PROP_FIELD_OF_VIEW) == 0)
    {
        if (nBufferSize != sizeof(XnFieldOfView))
        {
            XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_FIELD_OF_VIEW - buffer size is incorrect");
        }
        const XnFieldOfView* pFOV = (const XnFieldOfView*)pBuffer;
        nRetVal = SetFieldOfView(*pFOV);
        XN_IS_STATUS_OK(nRetVal);
    }
    else if (strcmp(strName, XN_PROP_USER_POSITIONS) == 0)
    {
        if (m_bSupportedUserPositionsCountReceived)
        {
            m_bSupportedUserPositionsCountReceived = FALSE;
            if (nBufferSize != m_nSupportedUserPositionsCount * sizeof(XnBoundingBox3D))
            {
                XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_USER_POSITIONS - buffer size is incorrect");
            }
            XN_DELETE_ARR(m_pUserPositions);
            m_pUserPositions = XN_NEW_ARR(XnBoundingBox3D, m_nSupportedUserPositionsCount);
            XN_VALIDATE_ALLOC_PTR(m_pUserPositions);
            xnOSMemCopy(m_pUserPositions, pBuffer, nBufferSize);
        }
        else
        {
            /* We got XN_PROP_USER_POSITIONS without XN_PROP_SUPPORTED_USER_POSITIONS_COUNT before it - that's an error */
            XN_ASSERT(FALSE);
            XN_LOG_ERROR_RETURN(XN_STATUS_ERROR, XN_MASK_OPEN_NI, "Got XN_PROP_USER_POSITIONS without XN_PROP_SUPPORTED_USER_POSITIONS_COUNT before it.");
        }
    }
    else
    {
        // ... (snippet truncated)
OniStatus Context::releaseDeviceList(OniDeviceInfo* pDevices)
{
    XN_DELETE_ARR(pDevices);
    return ONI_STATUS_OK;
}
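// releaseDeviceList() frees with XN_DELETE_ARR, which implies the matching list
// is allocated with XN_NEW_ARR inside Context::getDeviceList(). A usage sketch
// of the pairing (getDeviceList signature assumed from the OniCAPI equivalents,
// oniGetDeviceList/oniReleaseDeviceList):
OniDeviceInfo* pDevices = NULL;
int deviceCount = 0;
if (context.getDeviceList(&pDevices, &deviceCount) == ONI_STATUS_OK)
{
    for (int i = 0; i < deviceCount; ++i)
    {
        printf("device: %s\n", pDevices[i].uri);
    }
    context.releaseDeviceList(pDevices); // must pair with getDeviceList's allocation
}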
MockDepthGenerator::~MockDepthGenerator()
{
    XN_DELETE_ARR(m_pUserPositions);
}
// Start recording (once per recorder)
//----------------------------------------
bool ofxOpenNIRecorder::startRecord(string sName)
{
    // make sure we don't re-instantiate if we're already recording in stream mode
    if (is_recording && config.record_type == ONI_STREAMING)
    {
        return false;
    }

    xn::MockDepthGenerator m_depth;
    xn::MockImageGenerator m_image;
    xn::MockIRGenerator m_ir;

    // reset dropped frame counting variables
    nLastDepthTime = 0;
    nLastImageTime = 0;
    nMissedDepthFrames = 0;
    nMissedImageFrames = 0;
    nDepthFrames = 0;
    nImageFrames = 0;

    XnStatus result;

    // set the record file name
    config.record_name = ofToDataPath(sName, false);

    bool do_init = false;

    // by using this do_init method the interface is transparent to users
    // whichever way we are recording: the second call to startRecord() when
    // ONI_CYCLIC dumps the buffer to file (see stopRecord() below)
    if (config.record_type == ONI_CYCLIC && !is_recording)
    {
        printf("Start cyclic recording: %s\n", config.record_name.c_str());

        // reset cyclic recording variables
        m_nNextWrite = 0;
        m_nBufferCount = 0;

        is_recording = true;
    }
    else do_init = true;

    if (do_init)
    {
        // recorder init
        result = recorder.Create(context->getXnContext());
        CHECK_RC(result, "Recorder create");

        result = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, config.record_name.c_str());
        CHECK_RC(result, "Recorder set destination");

        if (config.record_depth)
        {
            if (config.record_type == ONI_STREAMING)
            {
                // just use the depth generator as the node to record
                result = recorder.AddNodeToRecording(depth_generator, XN_CODEC_16Z); // XN_CODEC_16Z_EMB_TABLES is smaller, but seems XN_CODEC_16Z is smoother
                CHECK_RC(result, "Recorder add depth node");
            }
            else if (config.record_type == ONI_CYCLIC)
            {
                // create a mock node based on the depth generator to record
                result = context->getXnContext().CreateMockNodeBasedOn(depth_generator, NULL, m_depth);
                CHECK_RC(result, "Create depth node");
                result = recorder.AddNodeToRecording(m_depth, XN_CODEC_16Z); // XN_CODEC_16Z_EMB_TABLES is smaller, but seems XN_CODEC_16Z is smoother
                CHECK_RC(result, "Recorder add depth node");
            }
        }

        // create image node
        if (config.record_image)
        {
            if (config.record_type == ONI_STREAMING)
            {
                // just use the image generator as the node to record
                result = recorder.AddNodeToRecording(image_generator, XN_CODEC_NULL); // XN_CODEC_NULL appears to give least frame drops and size not much > JPEG
                CHECK_RC(result, "Recorder add image node");
            }
            else if (config.record_type == ONI_CYCLIC)
            {
                // create a mock node based on the image generator to record
                result = context->getXnContext().CreateMockNodeBasedOn(image_generator, NULL, m_image);
                CHECK_RC(result, "Create image node");
                result = recorder.AddNodeToRecording(m_image, XN_CODEC_NULL); // XN_CODEC_NULL appears to give least frame drops and size not much > JPEG
                CHECK_RC(result, "Recorder add image node");
            }
        }

        // create ir node
        if (config.record_ir)
        {
            if (config.record_type == ONI_STREAMING)
            {
                // just use the IR generator as the node to record
                result = recorder.AddNodeToRecording(ir_generator, XN_CODEC_NULL); // XN_CODEC_NULL appears to give least frame drops and size not much > JPEG
                CHECK_RC(result, "Recorder add ir node");
            }
            else if (config.record_type == ONI_CYCLIC)
            {
                // create a mock node based on the IR generator to record
                result = context->getXnContext().CreateMockNodeBasedOn(ir_generator, NULL, m_ir);
                CHECK_RC(result, "Create ir node");
                result = recorder.AddNodeToRecording(m_ir, XN_CODEC_NULL); // XN_CODEC_NULL appears to give least frame drops and size not much > JPEG
                CHECK_RC(result, "Recorder add ir node");
            }
        }

        // Frame sync is currently not possible with Kinect cameras!!
        // If we try to frame sync then recording fails.
#ifndef USINGKINECT
        // Frame Sync
        if (xn_depth.IsCapabilitySupported(XN_CAPABILITY_FRAME_SYNC))
        {
            if (depth_generator.GetFrameSyncCap().CanFrameSyncWith(image_generator))
            {
                result = depth_generator.GetFrameSyncCap().FrameSyncWith(image_generator);
                CHECK_RC(result, "Enable frame sync");
            }
        }
#endif

        if (config.record_type == ONI_STREAMING)
        {
            printf("Start streaming recording: %s\n", config.record_name.c_str());
            is_recording = true;
        }
        else if (config.record_type == ONI_CYCLIC && is_recording)
        {
            // Record frames from the current position in the cyclic buffer through to the end
            if (m_nNextWrite < m_nBufferCount)
            {
                // Buffer has already wrapped; write from m_nNextWrite to the end
                for (XnUInt32 i = m_nNextWrite; i < m_nBufferSize; ++i)
                {
                    if (config.record_depth) m_depth.SetData(frames[i].depth_frame);
                    if (config.record_image) m_image.SetData(frames[i].image_frame);
                    if (config.record_ir) m_ir.SetData(frames[i].ir_frame);
                    recorder.Record();
                }
            }

            // Write frames from the beginning of the buffer to the last one written
            for (XnUInt32 i = 0; i < m_nNextWrite; ++i)
            {
                if (config.record_depth) m_depth.SetData(frames[i].depth_frame);
                if (config.record_image) m_image.SetData(frames[i].image_frame);
                if (config.record_ir) m_ir.SetData(frames[i].ir_frame);
                recorder.Record();
            }

            // cleanup
            recorder.Release();
            m_ir.Release();
            m_image.Release();
            m_depth.Release();
            XN_DELETE_ARR(frames);
        }
    }

    return true;
}
// Shutdown
//----------------------------------------
ofxOpenNIRecorder::~ofxOpenNIRecorder()
{
    XN_DELETE_ARR(frames);
    stopRecord(); // just to be sure
}
XnStatus Context::loadLibraries(const char* directoryName)
{
    XnStatus nRetVal;

    // Get a file list of Xiron devices
    XnInt32 nFileCount = 0;
    typedef XnChar FileName[XN_FILE_MAX_PATH];
    FileName* acsFileList = NULL;

#if (ONI_PLATFORM != ONI_PLATFORM_ANDROID_ARM)
    XnChar cpSearchString[XN_FILE_MAX_PATH] = "";

    xnLogVerbose(XN_MASK_ONI_CONTEXT, "Looking for drivers in drivers repository '%s'", directoryName);

    // Build the search pattern string
    XN_VALIDATE_STR_APPEND(cpSearchString, directoryName, XN_FILE_MAX_PATH, nRetVal);
    XN_VALIDATE_STR_APPEND(cpSearchString, XN_FILE_DIR_SEP, XN_FILE_MAX_PATH, nRetVal);
    XN_VALIDATE_STR_APPEND(cpSearchString, XN_SHARED_LIBRARY_PREFIX, XN_FILE_MAX_PATH, nRetVal);
    XN_VALIDATE_STR_APPEND(cpSearchString, XN_FILE_ALL_WILDCARD, XN_FILE_MAX_PATH, nRetVal);
    XN_VALIDATE_STR_APPEND(cpSearchString, XN_SHARED_LIBRARY_POSTFIX, XN_FILE_MAX_PATH, nRetVal);

    nRetVal = xnOSCountFiles(cpSearchString, &nFileCount);
    if (nRetVal != XN_STATUS_OK || nFileCount == 0)
    {
        xnLogError(XN_MASK_ONI_CONTEXT, "Found no drivers matching '%s'", cpSearchString);
        m_errorLogger.Append("Found no files matching '%s'", cpSearchString);
        return XN_STATUS_NO_MODULES_FOUND;
    }

    acsFileList = XN_NEW_ARR(FileName, nFileCount);
    nRetVal = xnOSGetFileList(cpSearchString, NULL, acsFileList, nFileCount, &nFileCount);
#else
    // Android
    nFileCount = 3;
    acsFileList = XN_NEW_ARR(FileName, nFileCount);
    strcpy(acsFileList[0], "libPS1080.so");
    strcpy(acsFileList[1], "libOniFile.so");
    strcpy(acsFileList[2], "libPSLink.so");
#endif

    // Save directory
    XnChar workingDir[XN_FILE_MAX_PATH];
    xnOSGetCurrentDir(workingDir, XN_FILE_MAX_PATH);
    // Change directory
    xnOSSetCurrentDir(directoryName);

    for (int i = 0; i < nFileCount; ++i)
    {
        DeviceDriver* pDeviceDriver = XN_NEW(DeviceDriver, acsFileList[i], m_frameManager, m_errorLogger);
        if (pDeviceDriver == NULL || !pDeviceDriver->isValid())
        {
            xnLogVerbose(XN_MASK_ONI_CONTEXT, "Couldn't use file '%s' as a device driver", acsFileList[i]);
            m_errorLogger.Append("Couldn't understand file '%s' as a device driver", acsFileList[i]);
            XN_DELETE(pDeviceDriver);
            continue;
        }

        OniCallbackHandle dummy;
        pDeviceDriver->registerDeviceConnectedCallback(deviceDriver_DeviceConnected, this, dummy);
        pDeviceDriver->registerDeviceDisconnectedCallback(deviceDriver_DeviceDisconnected, this, dummy);
        pDeviceDriver->registerDeviceStateChangedCallback(deviceDriver_DeviceStateChanged, this, dummy);
        if (!pDeviceDriver->initialize())
        {
            xnLogVerbose(XN_MASK_ONI_CONTEXT, "Couldn't use file '%s' as a device driver", acsFileList[i]);
            m_errorLogger.Append("Couldn't initialize device driver from file '%s'", acsFileList[i]);
            XN_DELETE(pDeviceDriver);
            continue;
        }

        m_cs.Lock();
        m_deviceDrivers.AddLast(pDeviceDriver);
        m_cs.Unlock();
    }

    // Return to directory
    xnOSSetCurrentDir(workingDir);

    if (m_deviceDrivers.Size() == 0)
    {
        xnLogError(XN_MASK_ONI_CONTEXT, "Found no valid drivers");
        m_errorLogger.Append("Found no valid drivers in '%s'", directoryName);
        XN_DELETE_ARR(acsFileList); // don't leak the file list on the error path
        return XN_STATUS_NO_MODULES_FOUND;
    }

    XN_DELETE_ARR(acsFileList);
    return XN_STATUS_OK;
}
XnStatus XnServerSensorInvoker::SetStreamSharedMemory(SensorInvokerStream* pStream)
{
    XnStatus nRetVal = XN_STATUS_OK;

    // give shared memory a name (to make the name unique, we'll add process ID)
    XN_PROCESS_ID procID;
    xnOSGetCurrentProcessID(&procID);
    XnChar strSharedMemoryName[XN_FILE_MAX_PATH];
    sprintf(strSharedMemoryName, "%u_%s_%s", (XnUInt32)procID, m_sensor.GetUSBPath(), pStream->strType);

    nRetVal = pStream->pSharedMemoryName->UnsafeUpdateValue(strSharedMemoryName);
    XN_IS_STATUS_OK(nRetVal);

    XnUInt32 nBufferSize = 0;
    XnUInt32 nPixelSize = 0;
    if (strcmp(pStream->strType, XN_STREAM_TYPE_DEPTH) == 0)
    {
        // have space for depth and shift values
        nPixelSize = sizeof(XnDepthPixel) + sizeof(XnUInt16);
    }
    else if (strcmp(pStream->strType, XN_STREAM_TYPE_IMAGE) == 0)
    {
        // biggest pixel size is the RGB24
        nPixelSize = sizeof(XnRGB24Pixel);
    }
    else if (strcmp(pStream->strType, XN_STREAM_TYPE_IR) == 0)
    {
        nPixelSize = sizeof(XnIRPixel);
    }
    else
    {
        XN_ASSERT(FALSE);
        return XN_STATUS_ERROR;
    }

    // find out max resolution
    XnUInt32 nMaxNumPixels = 0;
    nRetVal = GetStreamMaxResolution(pStream, nMaxNumPixels);
    XN_IS_STATUS_OK(nRetVal);

    nBufferSize = (XnUInt32)(nMaxNumPixels * nPixelSize * m_numberOfBuffers.GetValue());

    // allocate shared memory
    nRetVal = xnOSCreateSharedMemoryEx(strSharedMemoryName, nBufferSize, XN_OS_FILE_READ | XN_OS_FILE_WRITE, m_allowOtherUsers.GetValue() == TRUE, &pStream->hSharedMemory);
    XN_IS_STATUS_OK(nRetVal);

    nRetVal = xnOSSharedMemoryGetAddress(pStream->hSharedMemory, (void**)&pStream->pSharedMemoryAddress);
    XN_IS_STATUS_OK(nRetVal);

    // Set buffer pool for this stream
    XnGeneralBuffer* aBuffers = XN_NEW_ARR(XnGeneralBuffer, m_numberOfBuffers.GetValue());
    XN_VALIDATE_ALLOC_PTR(aBuffers); // guard the allocation, as elsewhere in this file
    XnUInt32 nSingleBufferSize = nBufferSize / m_numberOfBuffers.GetValue();
    for (XnUInt32 i = 0; i < m_numberOfBuffers.GetValue(); ++i)
    {
        aBuffers[i].pData = pStream->pSharedMemoryAddress + (i * nSingleBufferSize);
        aBuffers[i].nDataSize = nSingleBufferSize;
    }

    nRetVal = m_sensor.SetProperty(pStream->strType, XN_STREAM_PROPERTY_EXTERNAL_BUFFER_POOL, XnGeneralBufferPack(aBuffers, m_numberOfBuffers.GetValue() * sizeof(XnGeneralBuffer)));
    XN_DELETE_ARR(aBuffers);
    XN_IS_STATUS_OK(nRetVal);

    return (XN_STATUS_OK);
}
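// The pool above is an equal partition of a single shared-memory block:
// total = maxPixels * pixelSize * bufferCount, and buffer i starts at offset
// i * singleBufferSize. A standalone sketch of that arithmetic with
// hypothetical depth-stream numbers (VGA, 2B depth + 2B shift, 3 buffers):
#include <cstdio>

int main()
{
    const unsigned nMaxNumPixels = 640 * 480;
    const unsigned nPixelSize    = 2 + 2;
    const unsigned nBuffers      = 3;
    const unsigned nTotalSize    = nMaxNumPixels * nPixelSize * nBuffers;
    const unsigned nSingleSize   = nTotalSize / nBuffers;

    for (unsigned i = 0; i < nBuffers; ++i)
    {
        printf("buffer %u: offset=%u size=%u\n", i, i * nSingleSize, nSingleSize);
    }
    return 0;
}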
XnStatus MockMapGenerator::SetGeneralProperty(const XnChar* strName, XnUInt32 nBufferSize, const void* pBuffer)
{
    XN_VALIDATE_INPUT_PTR(strName);
    XN_VALIDATE_INPUT_PTR(pBuffer);

    XnStatus nRetVal = XN_STATUS_OK;
    if (strcmp(strName, XN_PROP_MAP_OUTPUT_MODE) == 0)
    {
        if (nBufferSize != sizeof(m_mapOutputMode))
        {
            XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_MAP_OUTPUT_MODE - buffer size is incorrect");
        }
        const XnMapOutputMode* pOutputMode = (const XnMapOutputMode*)pBuffer;
        nRetVal = SetMapOutputMode(*pOutputMode);
        XN_IS_STATUS_OK(nRetVal);
    }
    else if (strcmp(strName, XN_PROP_SUPPORTED_MAP_OUTPUT_MODES) == 0)
    {
        if (m_bSupportedMapOutputModesCountReceived)
        {
            m_bSupportedMapOutputModesCountReceived = FALSE; // For next time
            if (nBufferSize != m_nSupportedMapOutputModesCount * sizeof(XnMapOutputMode))
            {
                XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_SUPPORTED_MAP_OUTPUT_MODES - buffer size is incorrect");
            }
            XN_DELETE_ARR(m_pSupportedMapOutputModes);
            m_pSupportedMapOutputModes = XN_NEW_ARR(XnMapOutputMode, m_nSupportedMapOutputModesCount);
            XN_VALIDATE_ALLOC_PTR(m_pSupportedMapOutputModes);
            xnOSMemCopy(m_pSupportedMapOutputModes, pBuffer, nBufferSize);
        }
        else
        {
            XN_ASSERT(FALSE);
            XN_LOG_ERROR_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Got XN_PROP_SUPPORTED_MAP_OUTPUT_MODES without XN_PROP_SUPPORTED_MAP_OUTPUT_MODES_COUNT before it");
        }
    }
    else if (strcmp(strName, XN_PROP_CROPPING) == 0)
    {
        if (nBufferSize != sizeof(m_cropping))
        {
            XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_CROPPING - buffer size is incorrect");
        }
        const XnCropping* pCropping = (const XnCropping*)pBuffer;
        nRetVal = SetCropping(*pCropping);
        XN_IS_STATUS_OK(nRetVal);
    }
    else if (strcmp(strName, XN_PROP_NEWDATA) == 0)
    {
        XnUInt32 nExpectedSize = GetExpectedBufferSize();
        if (nBufferSize != nExpectedSize)
        {
            xnLogWarning(XN_MASK_OPEN_NI, "%s: Got new data with illegal buffer size (%u) - ignoring.", m_strName, nBufferSize);
        }
        else
        {
            // Send it to be handled by our base class
            nRetVal = MockGenerator::SetGeneralProperty(strName, nBufferSize, pBuffer);
            XN_IS_STATUS_OK(nRetVal);
        }
    }
    else
    {
        nRetVal = MockGenerator::SetGeneralProperty(strName, nBufferSize, pBuffer);
        XN_IS_STATUS_OK(nRetVal);
    }

    return XN_STATUS_OK;
}
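// The m_bSupportedMapOutputModesCountReceived flag above implies a two-phase
// protocol: the COUNT int property must arrive immediately before the array
// general property. A hedged reconstruction of the counterpart handler that
// presumably arms the flag (illustrative, not the verbatim OpenNI code):
XnStatus MockMapGenerator::SetIntProperty(const XnChar* strName, XnUInt64 nValue)
{
    if (strcmp(strName, XN_PROP_SUPPORTED_MAP_OUTPUT_MODES_COUNT) == 0)
    {
        m_nSupportedMapOutputModesCount = (XnUInt32)nValue;
        m_bSupportedMapOutputModesCountReceived = TRUE; // arm for the array that follows
        return XN_STATUS_OK;
    }
    return MockGenerator::SetIntProperty(strName, nValue);
}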
void Recorder::onAttach(XnUInt32 nodeId, VideoStream* pStream)
{
    if (nodeId == 0 || pStream == NULL)
    {
        return;
    }
    const OniSensorInfo* pSensorInfo = pStream->getSensorInfo();
    if (pSensorInfo == NULL)
    {
        return;
    }

    // Assume we'll be using uncompressed codec.
    XnUInt32 codecId = ONI_CODEC_UNCOMPRESSED;

    // Applicable for depth streams only.
    int maxDepth = XN_MAX_UINT16;

    OniVideoMode curVideoMode;
    int size = sizeof(OniVideoMode);
    pStream->getProperty(ONI_STREAM_PROPERTY_VIDEO_MODE, &curVideoMode, &size);

    // Guess codec type from video mode format.
    switch (curVideoMode.pixelFormat)
    {
    case ONI_PIXEL_FORMAT_DEPTH_100_UM:
    case ONI_PIXEL_FORMAT_DEPTH_1_MM:
        {
            size = int(sizeof(maxDepth));
            pStream->getProperty(ONI_STREAM_PROPERTY_MAX_VALUE, &maxDepth, &size);
            m_streams[pStream].pCodec = XN_NEW(Xn16zEmbTablesCodec, static_cast<XnUInt16>(maxDepth));
            codecId = ONI_CODEC_16Z_EMB_TABLES;
        }
        break;
    case ONI_PIXEL_FORMAT_RGB888:
        {
            if (m_streams[pStream].allowLossyCompression)
            {
                m_streams[pStream].pCodec = XN_NEW(XnJpegCodec, /* bRGB = */ TRUE, curVideoMode.resolutionX, curVideoMode.resolutionY);
                codecId = ONI_CODEC_JPEG;
            }
            else
            {
                m_streams[pStream].pCodec = XN_NEW(XnUncompressedCodec);
            }
        }
        break;
    default:
        m_streams[pStream].pCodec = XN_NEW(XnUncompressedCodec);
        break;
    }

    // If anything went wrong - fall back to uncompressed format.
    if (XN_STATUS_OK != m_streams[pStream].pCodec->Init())
    {
        XN_DELETE(m_streams[pStream].pCodec);
        m_streams[pStream].pCodec = NULL;
        codecId = ONI_CODEC_UNCOMPRESSED;
    }

    Memento undoPoint(this);

    // save the position of this record so we can override it upon detaching
    m_streams[pStream].nodeAddedRecordPosition = undoPoint.GetPosition();
    EMIT(RECORD_NODE_ADDED(
        m_streams[pStream].nodeType = AsNodeType(pSensorInfo->sensorType),
        nodeId,
        m_streams[pStream].codecId = codecId,
        /* numberOfFrames = */ XN_MAX_UINT32,
        /* minTimeStamp = */ XN_UINT64_C(0),
        /* maxTimeStamp = */ XN_MAX_UINT64,
        /* seekTablePosition = */ XN_UINT64_C(0)
    ))
    undoPoint.Reuse();

    // isGenerating (needed for OpenNI 1.x playback)
    EMIT(RECORD_INT_PROPERTY(
        nodeId,
        getLastPropertyRecordPos(nodeId, "xnIsGenerating", undoPoint.GetPosition()),
        "xnIsGenerating",
        TRUE
    ));
    undoPoint.Reuse();

    // xnDeviceMaxDepth
    if (curVideoMode.pixelFormat == ONI_PIXEL_FORMAT_DEPTH_1_MM ||
        curVideoMode.pixelFormat == ONI_PIXEL_FORMAT_DEPTH_100_UM)
    {
        EMIT(RECORD_INT_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnDeviceMaxDepth", undoPoint.GetPosition()),
            "xnDeviceMaxDepth",
            maxDepth
        ))
    }
    undoPoint.Reuse();

    // xnSupportedMapOutputModesCount
    EMIT(RECORD_INT_PROPERTY(
        nodeId,
        getLastPropertyRecordPos(nodeId, "xnSupportedMapOutputModesCount", undoPoint.GetPosition()),
        "xnSupportedMapOutputModesCount",
        pSensorInfo->numSupportedVideoModes
    ))
    undoPoint.Reuse();

    // xnSupportedMapOutputModes
    VideoModeData* pVideoModes = XN_NEW_ARR(VideoModeData, pSensorInfo->numSupportedVideoModes);
    for (int i = 0; i < pSensorInfo->numSupportedVideoModes; ++i)
    {
        const OniVideoMode& videoMode = pSensorInfo->pSupportedVideoModes[i];
        pVideoModes[i].width  = videoMode.resolutionX;
        pVideoModes[i].height = videoMode.resolutionY;
        pVideoModes[i].fps    = videoMode.fps;
    }
    EMIT(RECORD_GENERAL_PROPERTY(
        nodeId,
        getLastPropertyRecordPos(nodeId, "xnSupportedMapOutputModes", undoPoint.GetPosition()),
        "xnSupportedMapOutputModes",
        pVideoModes,
        sizeof(*pVideoModes) * pSensorInfo->numSupportedVideoModes
    ))
    undoPoint.Reuse();

    // xnSupportedPixelFormats
    XnSupportedPixelFormats supportedPixelFormats;
    fillXnSupportedPixelFormats(supportedPixelFormats, curVideoMode.pixelFormat);
    EMIT(RECORD_GENERAL_PROPERTY(
        nodeId,
        getLastPropertyRecordPos(nodeId, "xnSupportedPixelFormats", undoPoint.GetPosition()),
        "xnSupportedPixelFormats",
        &supportedPixelFormats,
        sizeof(supportedPixelFormats)
    ))
    undoPoint.Reuse();

    // xnMapOutputMode
    VideoModeData curVMD;
    curVMD.width  = curVideoMode.resolutionX;
    curVMD.height = curVideoMode.resolutionY;
    curVMD.fps    = curVideoMode.fps;
    EMIT(RECORD_GENERAL_PROPERTY(
        nodeId,
        getLastPropertyRecordPos(nodeId, "xnMapOutputMode", undoPoint.GetPosition()),
        "xnMapOutputMode",
        &curVMD,
        sizeof(curVMD)
    ))
    undoPoint.Reuse();

    // xnPixelFormat
    EMIT(RECORD_INT_PROPERTY(
        nodeId,
        getLastPropertyRecordPos(nodeId, "xnPixelFormat", undoPoint.GetPosition()),
        "xnPixelFormat",
        toXnPixelFormat(curVideoMode.pixelFormat)
    ))
    undoPoint.Reuse();

    EMIT(RECORD_INT_PROPERTY(
        nodeId,
        getLastPropertyRecordPos(nodeId, "oniPixelFormat", undoPoint.GetPosition()),
        "oniPixelFormat",
        curVideoMode.pixelFormat
    ))
    undoPoint.Reuse();

    XN_DELETE_ARR(pVideoModes);

    size = sizeof(XnFloat);
    float vdummy, hdummy;
    if (pStream->getProperty(ONI_STREAM_PROPERTY_HORIZONTAL_FOV, &hdummy, &size) == ONI_STATUS_OK &&
        pStream->getProperty(ONI_STREAM_PROPERTY_VERTICAL_FOV, &vdummy, &size) == ONI_STATUS_OK)
    {
        // xnFOV
        struct XnFieldOfView
        {
            /** Horizontal Field Of View, in radians. */
            XnDouble fHFOV;
            /** Vertical Field Of View, in radians. */
            XnDouble fVFOV;
        } fov = {hdummy, vdummy};
        EMIT(RECORD_GENERAL_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnFOV", undoPoint.GetPosition()),
            "xnFOV",
            &fov,
            sizeof(fov)
        ))
        undoPoint.Reuse();
    }

    // xnCropping
    struct XnCropping
    {
        /** TRUE if cropping is turned on, FALSE otherwise. */
        XnBool bEnabled;
        /** Offset in the X-axis, in pixels. */
        XnUInt16 nXOffset;
        /** Offset in the Y-axis, in pixels. */
        XnUInt16 nYOffset;
        /** Number of pixels in the X-axis. */
        XnUInt16 nXSize;
        /** Number of pixels in the Y-axis. */
        XnUInt16 nYSize;
    } xncropping = {0};
    OniCropping cropping;
    size = sizeof(OniCropping);
    if (pStream->getProperty(ONI_STREAM_PROPERTY_CROPPING, &cropping, &size) == ONI_STATUS_OK)
    {
        // we support cropping capability
        EMIT(RECORD_INT_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "Cropping", undoPoint.GetPosition()),
            "Cropping",
            TRUE
        ));
        undoPoint.Reuse();

        xncropping.bEnabled = cropping.enabled;
        xncropping.nXOffset = (XnUInt16)cropping.originX;
        xncropping.nYOffset = (XnUInt16)cropping.originY;
        xncropping.nXSize   = (XnUInt16)cropping.width;
        xncropping.nYSize   = (XnUInt16)cropping.height;
        EMIT(RECORD_GENERAL_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnCropping", undoPoint.GetPosition()),
            "xnCropping",
            &xncropping,
            sizeof(xncropping)
        ))
        undoPoint.Reuse();
    }

    OniBool bMirror = FALSE;
    size = sizeof(bMirror);
    if (pStream->getProperty(ONI_STREAM_PROPERTY_MIRRORING, &bMirror, &size) == ONI_STATUS_OK)
    {
        // we support mirroring capability
        EMIT(RECORD_INT_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "Mirror", undoPoint.GetPosition()),
            "Mirror",
            TRUE
        ));
        undoPoint.Reuse();

        // and now tell the mirror state
        EMIT(RECORD_INT_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnMirror", undoPoint.GetPosition()),
            "xnMirror",
            bMirror
        ))
        undoPoint.Reuse();
    }

    m_propertyPriority = ms_priorityHigh;
    pStream->notifyAllProperties();
    m_propertyPriority = ms_priorityNormal;
    undoPoint.Release();
}
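// Sketch of the undo-point idiom used throughout onAttach()/onRecord(), under
// the assumption that Memento rolls the record assembler back to a saved write
// position in its destructor unless told otherwise:
//
//     Memento undoPoint(this);   // remember the current write position
//     EMIT(...)                  // write one or more records
//     undoPoint.Reuse();         // keep what was written, re-arm at the new position
//     EMIT(...)
//     undoPoint.Release();       // commit: the destructor will not roll back
//
// On any early exit before Release(), the partially written records are undone,
// which keeps the recording file consistent.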
MockMapGenerator::~MockMapGenerator()
{
    XN_DELETE_ARR(m_pSupportedMapOutputModes);
}