Code Example #1
SceneDrawer::SceneDrawer()
{
    // drawing defaults
    g_bDrawBackground = TRUE;
    g_bDrawPixels = TRUE;
    g_bDrawSkeleton = TRUE;
    g_bPrintID = TRUE;
    g_bPrintState = TRUE;
    g_bPause=FALSE;

    // buffer initialization
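    // (two XnPoint3D endpoints per limb, but only one confidence value per limb)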
    pLimbsPosArr=XN_NEW_ARR(XnPoint3D,(MAX_LIMBS*2));
    pConfidenceArr=XN_NEW_ARR(XnFloat,MAX_LIMBS);

    // following are dummy assignments which will be overridden when DrawScene is called
    // (either in DrawScene itself or in InitTexture)
    m_pUserTrackerObj=NULL;
    depthTexID=0;
    pDepthTexBuf=NULL;
    texWidth=0;
    texHeight=0;
    for(int i=0; i<8; i++)
        texcoords[i] = 0;

}
Code Example #2
	DepthSenseDevice( OniDeviceInfo* pInfo, oni::driver::DriverServices& driverServices,
		      DepthSense::Context& context, DepthSense::Device& device )
		: m_pInfo(pInfo)
		, m_driverServices(driverServices)
		, m_context( context )
		, m_device( device )
	{
		m_numSensors = 2;

		m_sensors[0].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
		m_sensors[0].sensorType = ONI_SENSOR_DEPTH;
		m_sensors[0].numSupportedVideoModes = 1;
		m_sensors[0].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_DEPTH_1_MM;
		m_sensors[0].pSupportedVideoModes[0].fps = 30;
		m_sensors[0].pSupportedVideoModes[0].resolutionX = DEPTHSENSE_DEPTH_RESOLUTION_X;
		m_sensors[0].pSupportedVideoModes[0].resolutionY = DEPTHSENSE_DEPTH_RESOLUTION_Y;

		m_sensors[1].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
		m_sensors[1].sensorType = ONI_SENSOR_COLOR;
		m_sensors[1].numSupportedVideoModes = 1;
		m_sensors[1].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_RGB888;
		m_sensors[1].pSupportedVideoModes[0].fps = 30;
		m_sensors[1].pSupportedVideoModes[0].resolutionX = DEPTHSENSE_COLOR_RESOLUTION_X;
		m_sensors[1].pSupportedVideoModes[0].resolutionY = DEPTHSENSE_COLOR_RESOLUTION_Y;

	}
Code Example #3
DS325Device::DS325Device(OniDeviceInfo* pInfo, oni::driver::DriverServices& driverServices) : m_pInfo(pInfo), m_driverServices(driverServices) {
	int resID = DS325Config::GetImageResolutionID();

	m_numSensors = 3;

	m_sensors[0].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
	m_sensors[0].sensorType = ONI_SENSOR_DEPTH;
	m_sensors[0].numSupportedVideoModes = 1;
	m_sensors[0].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_DEPTH_1_MM;
	m_sensors[0].pSupportedVideoModes[0].fps = 30;
	m_sensors[0].pSupportedVideoModes[0].resolutionX = DEPTHSENSE_DEPTH_RESOLUTION_X;
	m_sensors[0].pSupportedVideoModes[0].resolutionY = DEPTHSENSE_DEPTH_RESOLUTION_Y;

	m_sensors[1].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
	m_sensors[1].sensorType = ONI_SENSOR_COLOR;
	m_sensors[1].numSupportedVideoModes = 1;
	m_sensors[1].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_RGB888;
	m_sensors[1].pSupportedVideoModes[0].fps = 30;
	m_sensors[1].pSupportedVideoModes[0].resolutionX = DS325Config::GetImageResolutionX(resID);
	m_sensors[1].pSupportedVideoModes[0].resolutionY = DS325Config::GetImageResolutionY(resID);

	m_sensors[2].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
	m_sensors[2].sensorType = ONI_SENSOR_IR;
	m_sensors[2].numSupportedVideoModes = 1;
	m_sensors[2].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_RGB888;
	m_sensors[2].pSupportedVideoModes[0].fps = 30;
	m_sensors[2].pSupportedVideoModes[0].resolutionX = DEPTHSENSE_DEPTH_RESOLUTION_X;
	m_sensors[2].pSupportedVideoModes[0].resolutionY = DEPTHSENSE_DEPTH_RESOLUTION_Y;
}
Code Example #4
File: PlayerNode.cpp  Project: 3david/OpenNI
XnStatus PlayerNode::Init()
{
	m_pRecordBuffer = XN_NEW_ARR(XnUInt8, RECORD_MAX_SIZE);
	XN_VALIDATE_ALLOC_PTR(m_pRecordBuffer);
	m_pUncompressedData = XN_NEW_ARR(XnUInt8, DATA_MAX_SIZE);
	XN_VALIDATE_ALLOC_PTR(m_pUncompressedData);
	return XN_STATUS_OK;
}
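
Examples #4 through #8 all follow the same allocation contract: XN_NEW_ARR allocates a typed array, XN_VALIDATE_ALLOC_PTR makes the enclosing function return XN_STATUS_ALLOC_FAILED if the pointer is NULL, and XN_DELETE_ARR releases the array. A minimal sketch of that contract, assuming the usual OpenNI definitions of these macros as thin wrappers around new[] and delete[]; the function and buffer names are illustrative only:

XnStatus FillScratchBuffer(XnUInt32 nSize)
{
	XnUInt8* pBuffer = XN_NEW_ARR(XnUInt8, nSize);	// essentially 'new XnUInt8[nSize]'
	XN_VALIDATE_ALLOC_PTR(pBuffer);			// early-returns XN_STATUS_ALLOC_FAILED on NULL

	// ... fill and consume pBuffer ...

	XN_DELETE_ARR(pBuffer);				// essentially 'delete[] pBuffer'
	return XN_STATUS_OK;
}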
Code Example #5
File: XnNodeManager.cpp  Project: nixz/OpenNI
XnStatus XnNodeManager::Resize(XnUInt32 nDeltaSize)
{
    // Allocate new nodes
    XnNode* pNewNodes = XN_NEW_ARR(XnNode, nDeltaSize);
    if (pNewNodes == NULL)
    {
        return XN_STATUS_ALLOC_FAILED;
    }

    // Connect them to each other
    for (XnUInt32 i = 0; i < nDeltaSize-1; ++i)
    {
        pNewNodes[i].Next() = &(pNewNodes[i+1]);
        pNewNodes[i].Previous() = NULL;
    }

    pNewNodes[nDeltaSize-1].Previous() = NULL;

    m_nCurrentAvailability += nDeltaSize;
    m_nCurrentCapacity += nDeltaSize;

    // Add the new nodes to the list
    m_pAllNodes->AddLast(XnValue(pNewNodes));
    // Replace first available with the first from this batch
    pNewNodes[nDeltaSize-1].Next() = m_pFirstAvailable;
    m_pFirstAvailable = &(pNewNodes[0]);

    return XN_STATUS_OK;
}
Code Example #6
File: XnNodeWatcher.cpp  Project: Clebeson/OpenNI
XnStatus AudioWatcher::NotifySupportedOutputModes()
{
	XnUInt32 nModes = m_audioGenerator.GetSupportedWaveOutputModesCount();
	if (nModes == 0)
	{
		return XN_STATUS_ERROR;
	}

	XnStatus nRetVal = NotifyIntPropChanged(XN_PROP_WAVE_SUPPORTED_OUTPUT_MODES_COUNT, nModes);
	XN_IS_STATUS_OK(nRetVal);
	XnWaveOutputMode *pSupportedModes = XN_NEW_ARR(XnWaveOutputMode, nModes);
	XN_VALIDATE_ALLOC_PTR(pSupportedModes);
	nRetVal = m_audioGenerator.GetSupportedWaveOutputModes(pSupportedModes, nModes);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE_ARR(pSupportedModes);
		return nRetVal;
	}

	nRetVal = NotifyGeneralPropChanged(XN_PROP_WAVE_SUPPORTED_OUTPUT_MODES, nModes * sizeof(XnWaveOutputMode), pSupportedModes);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE_ARR(pSupportedModes);
		return nRetVal;
	}
	XN_DELETE_ARR(pSupportedModes);
	return XN_STATUS_OK;	
}
Code Example #7
File: XnNodeWatcher.cpp  Project: Clebeson/OpenNI
XnStatus MapWatcher::NotifySupportedOutputModes()
{
	XnUInt32 nOutputModes = m_mapGenerator.GetSupportedMapOutputModesCount();
	if (nOutputModes == 0)
	{
		return XN_STATUS_ERROR;
	}

	XnStatus nRetVal = NotifyIntPropChanged(XN_PROP_SUPPORTED_MAP_OUTPUT_MODES_COUNT, nOutputModes);
	XN_IS_STATUS_OK(nRetVal);
	XnMapOutputMode *pOutputModes = XN_NEW_ARR(XnMapOutputMode, nOutputModes);
	XN_VALIDATE_ALLOC_PTR(pOutputModes);
	nRetVal = m_mapGenerator.GetSupportedMapOutputModes(pOutputModes, nOutputModes);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE_ARR(pOutputModes);
		return nRetVal;
	}
	nRetVal = NotifyGeneralPropChanged(XN_PROP_SUPPORTED_MAP_OUTPUT_MODES, sizeof(pOutputModes[0]) * nOutputModes, pOutputModes);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE_ARR(pOutputModes);
		return nRetVal;
	}

	XN_DELETE_ARR(pOutputModes);
	return XN_STATUS_OK;	
}
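
Examples #6 and #7 release the array with XN_DELETE_ARR on every early-return path. A hypothetical reworking (not OpenNI source; FetchModes and PublishModes are stand-in helpers for the generator query and the property notification) that funnels every exit through a single release point:

XnStatus NotifyModes(XnUInt32 nModes)
{
	XnMapOutputMode* pModes = XN_NEW_ARR(XnMapOutputMode, nModes);
	XN_VALIDATE_ALLOC_PTR(pModes);

	XnStatus nRetVal = FetchModes(pModes, nModes);	// stand-in for GetSupportedMapOutputModes()
	if (nRetVal == XN_STATUS_OK)
	{
		nRetVal = PublishModes(pModes, nModes);	// stand-in for NotifyGeneralPropChanged()
	}

	XN_DELETE_ARR(pModes);	// one release covers success and failure alike
	return nRetVal;
}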
Code Example #8
File: XnNodeWatcher.cpp  Project: Clebeson/OpenNI
XnStatus DepthWatcher::NotifyUserPositions()
{
	XnUInt32 nUserPositions = m_depthGenerator.GetUserPositionCap().GetSupportedUserPositionsCount();
	if (nUserPositions == 0)
	{
		return XN_STATUS_NOT_IMPLEMENTED;
	}

	XnStatus nRetVal = NotifyIntPropChanged(XN_PROP_SUPPORTED_USER_POSITIONS_COUNT, nUserPositions);
	XN_IS_STATUS_OK(nRetVal);
	XnBoundingBox3D *pPositions = XN_NEW_ARR(XnBoundingBox3D, nUserPositions);
	XN_VALIDATE_ALLOC_PTR(pPositions);

	XnUInt32 i;
	for (i = 0; i < nUserPositions; i++)
	{
		nRetVal = m_depthGenerator.GetUserPositionCap().GetUserPosition(i, pPositions[i]);
		if (nRetVal != XN_STATUS_OK)
		{
			XN_DELETE_ARR(pPositions);
			return nRetVal;
		}
	}

	nRetVal = NotifyGeneralPropChanged(XN_PROP_USER_POSITIONS, sizeof(pPositions[0]) * nUserPositions, pPositions);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE_ARR(pPositions);
		return nRetVal;
	}
	
	XN_DELETE_ARR(pPositions);
	return XN_STATUS_OK;	
}
Code Example #9
File: OniStream.cpp  Project: Arkapravo/OpenNI2
VideoStream::VideoStream(void* streamHandle, const OniSensorInfo* pSensorInfo, Device& device, const DriverHandler& libraryHandler, xnl::ErrorLogger& errorLogger) :
	m_errorLogger(errorLogger),
	m_pSensorInfo(NULL),
	m_running(true),
	m_device(device),
	m_driverHandler(libraryHandler),
	m_streamHandle(streamHandle),
	m_pContextNewFrameEvent(NULL),
	m_started(FALSE)
{
	xnOSCreateEvent(&m_newFrameInternalEvent, false);
	xnOSCreateEvent(&m_newFrameInternalEventForFrameHolder, false);
	xnOSCreateThread(newFrameThread, this, &m_newFrameThread);
	
	m_pSensorInfo = XN_NEW(OniSensorInfo);
	m_pSensorInfo->sensorType = pSensorInfo->sensorType;
	m_pSensorInfo->numSupportedVideoModes = pSensorInfo->numSupportedVideoModes;
	m_pSensorInfo->pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, m_pSensorInfo->numSupportedVideoModes);
	xnOSMemCopy(m_pSensorInfo->pSupportedVideoModes, pSensorInfo->pSupportedVideoModes, sizeof(OniVideoMode)*m_pSensorInfo->numSupportedVideoModes);

	m_driverHandler.streamSetNewFrameCallback(m_streamHandle, stream_NewFrame, this);
    m_driverHandler.streamSetPropertyChangedCallback(m_streamHandle, stream_PropertyChanged, this);

	refreshWorldConversionCache();
}
Code Example #10
File: OniRecorder.cpp  Project: Arkapravo/OpenNI2
void Recorder::onRecord(XnUInt32 nodeId, XnCodecBase* pCodec, const OniFrame* pFrame, XnUInt32 frameId, XnUInt64 timestamp)
{
    if (0 == nodeId || NULL == pFrame)
    {
        return;
    }

    FIND_ATTACHED_STREAM_INFO(nodeId)
    if (!pInfo) return;

    Memento undoPoint(this);

    if (NULL != pCodec)
    {
        XnUInt32 bufferSize_bytes32 = pFrame->dataSize * 2 + pCodec->GetOverheadSize();
        XnUInt8* buffer             = XN_NEW_ARR(XnUInt8, bufferSize_bytes32);

        XnStatus status = pCodec->Compress(reinterpret_cast<const XnUChar*>(pFrame->data),
                pFrame->dataSize, buffer, &bufferSize_bytes32);
        XnSizeT bufferSize_bytes = bufferSize_bytes32;
        if (XN_STATUS_OK == status)
        {
            EMIT(RECORD_NEW_DATA(
                    nodeId,
                    pInfo->lastNewDataRecordPosition,
                    timestamp,
                    frameId,
                    buffer,
                    bufferSize_bytes))
        }
        XN_DELETE_ARR(buffer);
    }
}

Code Example #11
// Initialization - set outdir and time of each recording
void MovieMng::Initialize(void)
{
#ifdef LOG_WRITE_ENABLE
	fopen_s(&mmng_fp, LOG_NAME, "a+");
	char time_str[TIME_STR_LEN];
	g_util.GetTimeStr(sizeof(time_str), time_str);
	fprintf(mmng_fp, "=== %s ===\n", time_str);
#endif
	// Load the configuration file
	char tmp[16];
	Config config(CONFIG_FILE);
	config.Initialize();

	config.GetValue("MOVIE_LENGTH", sizeof(tmp), tmp);
	m_nMovieLength = atoi(tmp);

	config.GetValue("MOVIE_FILE_WRITE", sizeof(tmp), tmp);
	if(strcmp(tmp, "TRUE") == 0)
	{
		m_bFileWrite = TRUE;
	}

	config.GetValue("DATA_DIR", sizeof(m_strDirName), m_strDirName);
	config.GetValue("MOVIE_FILE_DIR", sizeof(m_strDirName), &m_strDirName[strlen(m_strDirName)]);

	config.GetValue("FACE_DIRECTION", sizeof(tmp), tmp);
	if(strcmp(tmp, "TRUE") == 0)
	{
		m_bSendDepth = TRUE;
	}

	config.GetValue("FACE_HOST", sizeof(m_faceHost), m_faceHost);
	
	config.GetValue("FACE_PORT", sizeof(tmp), tmp);
	m_facePort = (short)atoi(tmp);

	config.GetValue("MOVIE_LENGTH", sizeof(tmp), tmp);
	m_nFileLength = (short)atoi(tmp);

	// Create buffer. Buffer size: 10 seconds' worth (assuming 30 fps)
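	// (this implies FRAME_BUF_LENGTH is a duration in seconds, here 10)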
	m_nBufferSize = m_nBufferSizeFile = m_nBufferSizeSend = FRAME_BUF_LENGTH * 30;
	m_pFrames = XN_NEW_ARR(SingleFrame, m_nBufferSize);
	
	// Create threads
	if (m_bFileWrite == TRUE)
	{
		// For file writing
		InitializeCriticalSection(&csFileWriter);
		m_FileWriterTh = (HANDLE)_beginthreadex( NULL, 0, &FileWriter, this, 0, &m_FileWriterId );
	}

	if (m_bSendDepth == TRUE)
	{
		ConnectFaceServer();
		// For sending depth information
		InitializeCriticalSection(&csDepthSender);
		m_DepthSenderTh = (HANDLE)_beginthreadex( NULL, 0, &DepthSender, this, 0, &m_DepthSenderId );
	}
}
Code Example #12
File: DummyDevice.cpp  Project: 1170390/OpenNI2
	OzDevice(OniDeviceInfo* pInfo, oni::driver::DriverServices& driverServices) : m_pInfo(pInfo), m_driverServices(driverServices)
	{
		m_numSensors = 2;

		m_sensors[0].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
		m_sensors[0].sensorType = ONI_SENSOR_DEPTH;
		m_sensors[0].numSupportedVideoModes = 1;
		m_sensors[0].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_DEPTH_1_MM;
		m_sensors[0].pSupportedVideoModes[0].fps = 30;
		m_sensors[0].pSupportedVideoModes[0].resolutionX = OZ_RESOLUTION_X;
		m_sensors[0].pSupportedVideoModes[0].resolutionY = OZ_RESOLUTION_Y;

		m_sensors[1].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
		m_sensors[1].sensorType = ONI_SENSOR_COLOR;
		m_sensors[1].numSupportedVideoModes = 1;
		m_sensors[1].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_RGB888;
		m_sensors[1].pSupportedVideoModes[0].fps = 30;
		m_sensors[1].pSupportedVideoModes[0].resolutionX = OZ_RESOLUTION_X;
		m_sensors[1].pSupportedVideoModes[0].resolutionY = OZ_RESOLUTION_Y;

	}
Code Example #13
	KinectV2Device(oni::driver::DriverServices& driverServices, CComPtr<IKinectSensor>& kinect)
        : kinect_( kinect )
        , m_driverServices(driverServices)
	{
		m_numSensors = 2;

		m_sensors[0].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
		m_sensors[0].sensorType = ONI_SENSOR_DEPTH;
		m_sensors[0].numSupportedVideoModes = 1;
		m_sensors[0].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_DEPTH_1_MM;
		m_sensors[0].pSupportedVideoModes[0].fps = 30;
		m_sensors[0].pSupportedVideoModes[0].resolutionX = DEPTH_WIDTH;
        m_sensors[0].pSupportedVideoModes[0].resolutionY = DEPTH_HEIGHT;

		m_sensors[1].pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, 1);
		m_sensors[1].sensorType = ONI_SENSOR_COLOR;
		m_sensors[1].numSupportedVideoModes = 1;
		m_sensors[1].pSupportedVideoModes[0].pixelFormat = ONI_PIXEL_FORMAT_RGB888;
		m_sensors[1].pSupportedVideoModes[0].fps = 30;
        m_sensors[1].pSupportedVideoModes[0].resolutionX = COLOR_WIDTH;
        m_sensors[1].pSupportedVideoModes[0].resolutionY = COLOR_HEIGHT;
	}
Code Example #14
File: PlayerNode.cpp  Project: jgollub/MetaImagerProj
XnStatus PlayerNode::OpenStream()
{
	XN_VALIDATE_INPUT_PTR(m_pInputStream);
	XnStatus nRetVal = m_pInputStream->Open(m_pStreamCookie);
	XN_IS_STATUS_OK(nRetVal);
	RecordingHeader header;
	XnUInt32 nBytesRead = 0;
	
	nRetVal = m_pInputStream->Read(m_pStreamCookie, &header, sizeof(header), &nBytesRead);
	XN_IS_STATUS_OK(nRetVal);
	if (nBytesRead < sizeof(header))
	{
		XN_LOG_ERROR_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Not enough bytes read");
	}

	/* Check header */
	if (xnOSMemCmp(header.headerMagic, DEFAULT_RECORDING_HEADER.headerMagic, sizeof(header.headerMagic)) != 0)
	{
		XN_LOG_ERROR_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Invalid header magic");
	}

	if ((xnVersionCompare(&header.version, &OLDEST_SUPPORTED_FILE_FORMAT_VERSION) < 0) || //File format is too old
		(xnVersionCompare(&header.version, &DEFAULT_RECORDING_HEADER.version) > 0)) //File format is too new
	{
		XN_LOG_ERROR_RETURN(XN_STATUS_UNSUPPORTED_VERSION, XN_MASK_OPEN_NI, "Unsupported file format version: %u.%u.%u.%u", header.version.nMajor, header.version.nMinor, header.version.nMaintenance, header.version.nBuild);
	}

	m_nGlobalMaxTimeStamp = header.nGlobalMaxTimeStamp;
	m_nMaxNodes = header.nMaxNodeID + 1;
	XN_ASSERT(m_nMaxNodes > 0);
	XN_DELETE_ARR(m_pNodeInfoMap);
	xnOSFree(m_aSeekTempArray);
	m_pNodeInfoMap = XN_NEW_ARR(PlayerNodeInfo, m_nMaxNodes);
	XN_VALIDATE_ALLOC_PTR(m_pNodeInfoMap);
	XN_VALIDATE_CALLOC(m_aSeekTempArray, DataIndexEntry*, m_nMaxNodes);
	
	m_bOpen = TRUE;
	nRetVal = ProcessUntilFirstData();
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE_ARR(m_pNodeInfoMap);
		m_pNodeInfoMap = NULL;
		xnOSFree(m_aSeekTempArray);
		m_aSeekTempArray = NULL;
		return nRetVal;
	}

	return XN_STATUS_OK;
}
Code Example #15
//---------------------------------------------------------------------------
// Code
//---------------------------------------------------------------------------
ClosestUserSelector::ClosestUserSelector(xn::UserGenerator *pUserGenerator,
                                         TrackingInitializer *pTrackingInitializer,
                                         XnUInt32 nMaxNumUsers) :
                                                            UserSelector(pUserGenerator),
                                                            m_pTrackingInitializer(pTrackingInitializer),
                                                            m_nMaxNumUsers(nMaxNumUsers),
                                                            m_pUsersList(NULL)
{
    VALIDATE(m_bValid,"UserSelector was not initialized properly");
    VALIDATE(pTrackingInitializer!=NULL,"NULL tracking initializer supplied");
    VALIDATE(pTrackingInitializer->IsValid(),"tracking initializer supplied is invalid");
    VALIDATE(m_pUserGenerator!=NULL,"NULL user generator supplied");
    VALIDATE(m_nMaxNumUsers>0, "0 users tracking");
    m_pUsersList=XN_NEW_ARR(UserInfo,m_nMaxNumUsers);
}
Code Example #16
File: OniContext.cpp  Project: quintona/openni2
OniStatus Context::getDeviceList(OniDeviceInfo** pDevices, int* pDeviceCount)
{
	m_cs.Lock();

	*pDeviceCount = m_devices.Size();
	*pDevices = XN_NEW_ARR(OniDeviceInfo, *pDeviceCount);

	int idx = 0;
	for (xnl::List<Device*>::ConstIterator iter = m_devices.Begin(); iter != m_devices.End(); ++iter, ++idx)
	{
		xnOSMemCopy((*pDevices)+idx, (*iter)->getInfo(), sizeof(OniDeviceInfo));
	}

	m_cs.Unlock();
	return ONI_STATUS_OK;

}
Code Example #17
File: PlayerDevice.cpp  Project: Arkapravo/OpenNI2
OniStatus PlayerDevice::getSensorInfoList(OniSensorInfo** pSources, int* numSources)
{
	Lock();

	// Update source count.
	*numSources = (int)m_sources.Size();
	*pSources = XN_NEW_ARR(OniSensorInfo, *numSources);

	// Copy sources.
	SourceList::Iterator iter = m_sources.Begin();
	for (int i = 0; i < *numSources; ++i, ++iter)
	{
		xnOSMemCopy(&(*pSources)[i],  (*iter)->GetInfo(), sizeof(OniSensorInfo));
	}

	Unlock();

    return ONI_STATUS_OK;
}
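
In Examples #16 and #17 the getter allocates the out-array with XN_NEW_ARR and hands it to the caller, so ownership crosses the call boundary. A caller-side sketch; the excerpts do not show the matching release routine, so the XN_DELETE_ARR below stands in for whatever release call the API actually pairs with the getter:

OniDeviceInfo* pDevices = NULL;
int deviceCount = 0;
if (context.getDeviceList(&pDevices, &deviceCount) == ONI_STATUS_OK)
{
	for (int i = 0; i < deviceCount; ++i)
	{
		// ... inspect pDevices[i] ...
	}
	XN_DELETE_ARR(pDevices);	// release what the getter allocated
}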
Code Example #18
File: MockDepthGenerator.cpp  Project: 3david/OpenNI
XnStatus MockDepthGenerator::SetGeneralProperty(const XnChar* strName, XnUInt32 nBufferSize, const void* pBuffer)
{
	XN_VALIDATE_INPUT_PTR(strName);
	XN_VALIDATE_INPUT_PTR(pBuffer);

	XnStatus nRetVal = XN_STATUS_OK;
	if (strcmp(strName, XN_PROP_FIELD_OF_VIEW) == 0)
	{
		if (nBufferSize != sizeof(XnFieldOfView))
		{
			XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_FIELD_OF_VIEW - buffer size is incorrect");
		}
		
		const XnFieldOfView* pFOV = (const XnFieldOfView*)pBuffer;
		nRetVal = SetFieldOfView(*pFOV);
		XN_IS_STATUS_OK(nRetVal);
	}
	else if (strcmp(strName, XN_PROP_USER_POSITIONS) == 0)
	{
		if (m_bSupportedUserPositionsCountReceived)
		{
			m_bSupportedUserPositionsCountReceived = FALSE;
			if (nBufferSize != m_nSupportedUserPositionsCount * sizeof(XnBoundingBox3D))
			{
				XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_USER_POSITIONS - buffer size is incorrect");
			}

			XN_DELETE_ARR(m_pUserPositions);
			m_pUserPositions = XN_NEW_ARR(XnBoundingBox3D, m_nSupportedUserPositionsCount);
			XN_VALIDATE_ALLOC_PTR(m_pUserPositions);
			xnOSMemCopy(m_pUserPositions, pBuffer, nBufferSize);
		}
		else
		{
			/*We got XN_PROP_USER_POSITIONS without 
			  XN_PROP_SUPPORTED_USER_POSITIONS_COUNT before it - that's an error*/
			XN_ASSERT(FALSE);
			XN_LOG_ERROR_RETURN(XN_STATUS_ERROR, XN_MASK_OPEN_NI, "got XN_PROP_USER_POSITIONS without XN_PROP_SUPPORTED_USER_POSITIONS_COUNT before it.")
		}
	}
	else
	{
		// The excerpt ends here; in OpenNI the remaining properties are
		// delegated to the base class, mirroring the pattern of
		// MockMapGenerator::SetGeneralProperty in Example #28.
		nRetVal = MockMapGenerator::SetGeneralProperty(strName, nBufferSize, pBuffer);
		XN_IS_STATUS_OK(nRetVal);
	}

	return XN_STATUS_OK;
}
Code Example #19
File: OniDataRecords.cpp  Project: 1170390/OpenNI2
void RecordAssembler::initialize()
{
    XnSizeT maxHeaderSize_bytes = 
        /* size of header POD   = */ sizeof(RecordHeaderData) + 
        /* size of node name    = */ (ONI_MAX_STR + 1) +
        /* size of time stamp   = */ sizeof(XnUInt64) +
        /* size of frame number = */ sizeof(XnUInt32);

    m_bufferSize_bytes = (XnSizeT)(maxHeaderSize_bytes +
        /* max video mode width (pixels)  = */ 1600 *
        /* max video mode height (pixels) = */ 1200 *
        /* max video mode bits per pixel  = */ 3    *
        /* worst case compression rate    = */ 1.2);

    m_pEmitPtr = m_pBuffer = XN_NEW_ARR(XnUInt8, m_bufferSize_bytes);
    if (NULL == m_pBuffer)
    {
        m_bufferSize_bytes = 0;
    } 
}
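
For scale, the worst-case payload above works out to 1600 * 1200 * 3 = 5,760,000 bytes, and the 1.2 compression-overhead factor raises it to 6,912,000 bytes (about 6.6 MiB) before maxHeaderSize_bytes is added.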
Code Example #20
// Setup the recorder.
//----------------------------------------
void ofxOpenNIRecorder::setup(ofxOpenNIContext*	pContext
							  ,int				b_record_type
							  ,int				b_record_time
							  ,bool				b_record_image
							  ,bool				b_record_ir
							  ,bool				b_record_depth)
{
	
	// set context and generator references
	context = pContext;
	context->getDepthGenerator(&depth_generator);
	if (b_record_image) context->getImageGenerator(&image_generator);
	
	// check we have correct settings for recording image OR ir generators
	if (b_record_ir || (b_record_image && !image_generator.IsValid())) context->getIRGenerator(&ir_generator);
	
	if ((b_record_ir || b_record_image) && !image_generator.IsValid() && ir_generator.IsValid()) {
		printf("Switching recording to IR generator");
		b_record_ir		= true;
		b_record_image	= false;
	}
	if ((b_record_ir || b_record_image) && !image_generator.IsValid() && !ir_generator.IsValid()) {
		printf("No Image or IR generator detected!");
		b_record_ir		= false;
		b_record_image	= false;
	}
	
	// set configuration
	config.record_image = b_record_image;
	config.record_ir	= b_record_ir;
	config.record_depth = b_record_depth;
	config.record_type	= b_record_type;
	config.record_time	= b_record_time;
	
	// set buffer size
	m_nBufferSize = config.record_time * 30;
	frames = XN_NEW_ARR(SingleFrame, m_nBufferSize);
	
	is_recording = false;
	
}

Code Example #21
XnStatus XnServerSensorInvoker::GetStreamMaxResolution(SensorInvokerStream* pStream, XnUInt32& nMaxNumPixels)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	XnUInt64 nCount = 0;
	nRetVal = m_sensor.GetProperty(pStream->strType, XN_STREAM_PROPERTY_SUPPORT_MODES_COUNT, &nCount);
	XN_IS_STATUS_OK(nRetVal);

	XnCmosPreset* aPresets = XN_NEW_ARR(XnCmosPreset, nCount);
	nRetVal = m_sensor.GetProperty(pStream->strType, XN_STREAM_PROPERTY_SUPPORT_MODES, XnGeneralBufferPack(aPresets, nCount * sizeof(XnCmosPreset)));
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE_ARR(aPresets);
		return nRetVal;
	}

	XnUInt32 nMaxPixels = 0;
	for (XnUInt32 i = 0; i < nCount; ++i)
	{
		XnUInt32 nXRes;
		XnUInt32 nYRes;
		if (!XnDDKGetXYFromResolution((XnResolutions)aPresets[i].nResolution, &nXRes, &nYRes))
		{
			continue;
		}

		if (nXRes * nYRes > nMaxPixels)
		{
			nMaxPixels = nXRes * nYRes;
		}
	}

	XN_ASSERT(nMaxPixels > 0);

	XN_DELETE_ARR(aPresets);

	nMaxNumPixels = nMaxPixels;
	
	return (XN_STATUS_OK);
}
Code Example #22
File: LinkOniMapStream.cpp  Project: 1170390/OpenNI2
XnStatus LinkOniMapStream::FillSupportedVideoModes()
{
	int nCount;
	const xnl::Array<XnFwStreamVideoMode> *pSupported;
	pSupported = &m_pInputStream->GetSupportedVideoModes();
	nCount = (int)pSupported->GetSize();

	m_aSupportedModes = XN_NEW_ARR(SupportedVideoMode, nCount);
	XN_VALIDATE_ALLOC_PTR(m_aSupportedModes);
	m_nSupportedModesCount = nCount;

	for (int i = 0; i < nCount; ++i)
	{
		m_aSupportedModes[i].nInputFormat			= pSupported->GetData()[i].m_nPixelFormat;
		
		m_aSupportedModes[i].OutputMode.resolutionX	= pSupported->GetData()[i].m_nXRes;
		m_aSupportedModes[i].OutputMode.resolutionY	= pSupported->GetData()[i].m_nYRes;
		m_aSupportedModes[i].OutputMode.fps			= pSupported->GetData()[i].m_nFPS;
		m_aSupportedModes[i].OutputMode.pixelFormat	= (OniPixelFormat)-1; // this field is not to be used here
	}

	return (XN_STATUS_OK);
}
Code Example #23
File: main.cpp  Project: ABMNYZ/OpenNI
	// Initialization - set outdir and time of each recording
	void Initialize(XnChar* strDirName, XnUInt32 nSeconds)
	{
		xnOSStrCopy(m_strDirName, strDirName, XN_FILE_MAX_PATH);
		m_nBufferSize = nSeconds*30;
		m_pFrames = XN_NEW_ARR(SingleFrame, m_nBufferSize);
	}
Code Example #24
File: OniContext.cpp  Project: quintona/openni2
XnStatus Context::loadLibraries(const char* directoryName)
{
	XnStatus nRetVal;

	// Get a file list of Xiron devices

	XnInt32 nFileCount = 0;
	typedef XnChar FileName[XN_FILE_MAX_PATH];
	FileName* acsFileList = NULL;

#if (ONI_PLATFORM != ONI_PLATFORM_ANDROID_ARM)
	XnChar cpSearchString[XN_FILE_MAX_PATH] = "";

	xnLogVerbose(XN_MASK_ONI_CONTEXT, "Looking for drivers in drivers repository '%s'", directoryName);

	// Build the search pattern string
	XN_VALIDATE_STR_APPEND(cpSearchString, directoryName, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_FILE_DIR_SEP, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_SHARED_LIBRARY_PREFIX, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_FILE_ALL_WILDCARD, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_SHARED_LIBRARY_POSTFIX, XN_FILE_MAX_PATH, nRetVal);

	nRetVal = xnOSCountFiles(cpSearchString, &nFileCount);
	if (nRetVal != XN_STATUS_OK || nFileCount == 0)
	{
		xnLogError(XN_MASK_ONI_CONTEXT, "Found no drivers matching '%s'", cpSearchString);
		m_errorLogger.Append("Found no files matching '%s'", cpSearchString);
		return XN_STATUS_NO_MODULES_FOUND;
	}

	acsFileList = XN_NEW_ARR(FileName, nFileCount);
	nRetVal = xnOSGetFileList(cpSearchString, NULL, acsFileList, nFileCount, &nFileCount);
#else
	// Android
	nFileCount = 3;
	acsFileList = XN_NEW_ARR(FileName, nFileCount);
	strcpy(acsFileList[0], "libPS1080.so");
	strcpy(acsFileList[1], "libOniFile.so");
	strcpy(acsFileList[2], "libPSLink.so");
#endif

	// Save directory
	XnChar workingDir[XN_FILE_MAX_PATH];
	xnOSGetCurrentDir(workingDir, XN_FILE_MAX_PATH);
	// Change directory
	xnOSSetCurrentDir(directoryName);

	for (int i = 0; i < nFileCount; ++i)
	{
		DeviceDriver* pDeviceDriver = XN_NEW(DeviceDriver, acsFileList[i], m_frameManager, m_errorLogger);
		if (pDeviceDriver == NULL || !pDeviceDriver->isValid())
		{
			xnLogVerbose(XN_MASK_ONI_CONTEXT, "Couldn't use file '%s' as a device driver", acsFileList[i]);
			m_errorLogger.Append("Couldn't understand file '%s' as a device driver", acsFileList[i]);
			XN_DELETE(pDeviceDriver);
			continue;
		}
		OniCallbackHandle dummy;
		pDeviceDriver->registerDeviceConnectedCallback(deviceDriver_DeviceConnected, this, dummy);
		pDeviceDriver->registerDeviceDisconnectedCallback(deviceDriver_DeviceDisconnected, this, dummy);
		pDeviceDriver->registerDeviceStateChangedCallback(deviceDriver_DeviceStateChanged, this, dummy);
		if (!pDeviceDriver->initialize())
		{
			xnLogVerbose(XN_MASK_ONI_CONTEXT, "Couldn't use file '%s' as a device driver", acsFileList[i]);
			m_errorLogger.Append("Couldn't initialize device driver from file '%s'", acsFileList[i]);
			XN_DELETE(pDeviceDriver);
			continue;
		}
		m_cs.Lock();
		m_deviceDrivers.AddLast(pDeviceDriver);
		m_cs.Unlock();
	}

	// Return to directory
	xnOSSetCurrentDir(workingDir);

	if (m_deviceDrivers.Size() == 0)
	{
		xnLogError(XN_MASK_ONI_CONTEXT, "Found no valid drivers");
		m_errorLogger.Append("Found no valid drivers in '%s'", directoryName);
		return XN_STATUS_NO_MODULES_FOUND;
	}

	XN_DELETE_ARR(acsFileList);

	return XN_STATUS_OK;
}
Code Example #25
File: XnOniDevice.cpp  Project: MetaMagic/OpenNI2
XnStatus XnOniDevice::FillSupportedVideoModes()
{
	XnUInt32 nSupportedModes      = 0;
	XnCmosPreset* pSupportedModes = NULL;
	
	int s = 0;

	// Depth
	nSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.depthModes.GetSize();
	pSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.depthModes.GetData();

	m_sensors[s].sensorType             = ONI_SENSOR_DEPTH;
	m_sensors[s].pSupportedVideoModes   = XN_NEW_ARR(OniVideoMode, nSupportedModes);
	XN_VALIDATE_ALLOC_PTR(m_sensors[s].pSupportedVideoModes);
	
	OniPixelFormat depthFormats[] = { ONI_PIXEL_FORMAT_DEPTH_1_MM, ONI_PIXEL_FORMAT_DEPTH_100_UM };
	XnSizeT depthFormatsCount = sizeof(depthFormats) / sizeof(depthFormats[0]);

	int writeIndex = 0;
	for(XnUInt32 i = 0; i < nSupportedModes; ++i)
	{
		for (XnSizeT formatIndex = 0; formatIndex < depthFormatsCount; ++formatIndex)
		{
			m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat = depthFormats[formatIndex];
			m_sensors[s].pSupportedVideoModes[writeIndex].fps = pSupportedModes[i].nFPS;
			XnBool bOK = XnDDKGetXYFromResolution(
				(XnResolutions)pSupportedModes[i].nResolution,
				(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX,
				(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY
				);
			XN_ASSERT(bOK);
			XN_REFERENCE_VARIABLE(bOK);

			bool foundMatch = false;
			for (int j = 0; j < writeIndex; ++j)
			{
				if (m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat == m_sensors[s].pSupportedVideoModes[j].pixelFormat &&
					m_sensors[s].pSupportedVideoModes[writeIndex].fps == m_sensors[s].pSupportedVideoModes[j].fps &&
					m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX == m_sensors[s].pSupportedVideoModes[j].resolutionX &&
					m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY == m_sensors[s].pSupportedVideoModes[j].resolutionY)
				{
					// Already know this configuration
					foundMatch = true;
					break;
				}
			}
			if (!foundMatch)
			{
				++writeIndex;
			}
		}
	}
	m_sensors[s].numSupportedVideoModes = writeIndex;

	// Image

	// first, make sure that our sensor actually supports Image
	XnUInt64 nImageSupported = FALSE;
	XnStatus nRetVal = m_sensor.GetProperty(XN_MASK_DEVICE, XN_MODULE_PROPERTY_IMAGE_SUPPORTED, &nImageSupported);
	XN_IS_STATUS_OK(nRetVal);
	if (nImageSupported)
	{
		++s;
		nSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.imageModes.GetSize();
		pSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.imageModes.GetData();

		m_sensors[s].sensorType             = ONI_SENSOR_COLOR;
		m_sensors[s].numSupportedVideoModes = 0; // to be changed later..
		m_sensors[s].pSupportedVideoModes   = XN_NEW_ARR(OniVideoMode, nSupportedModes * 10);
		XN_VALIDATE_ALLOC_PTR(m_sensors[s].pSupportedVideoModes);
		
		writeIndex = 0;
		for(XnUInt32 j=0; j < nSupportedModes; ++j)
		{
			// make an OniVideoMode for each OniFormat supported by the input format
			OniPixelFormat aOniFormats[10];
			int       nOniFormats = 0;
			XnOniColorStream::GetAllowedOniOutputFormatForInputFormat((XnIOImageFormats)pSupportedModes[j].nFormat, aOniFormats, &nOniFormats);
			for(int curOni=0; curOni<nOniFormats; ++curOni)
			{
				m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat = aOniFormats[curOni];
			
				m_sensors[s].pSupportedVideoModes[writeIndex].fps = pSupportedModes[j].nFPS;
				XnBool bOK = XnDDKGetXYFromResolution(
					(XnResolutions)pSupportedModes[j].nResolution,
					(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX,
					(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY
					);
				XN_ASSERT(bOK);
				XN_REFERENCE_VARIABLE(bOK);

				bool foundMatch = false;
				for (int i = 0; i < writeIndex; ++i)
				{
					if (m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat == m_sensors[s].pSupportedVideoModes[i].pixelFormat &&
						m_sensors[s].pSupportedVideoModes[writeIndex].fps == m_sensors[s].pSupportedVideoModes[i].fps &&
						m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX == m_sensors[s].pSupportedVideoModes[i].resolutionX &&
						m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY == m_sensors[s].pSupportedVideoModes[i].resolutionY)
					{
						// Already know this configuration
						foundMatch = true;
						break;
					}
				}
				if (!foundMatch)
				{
					++writeIndex;
				}
			}
		}
		m_sensors[s].numSupportedVideoModes = writeIndex;
	}

	// IR
	++s;
	nSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.irModes.GetSize();
	pSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.irModes.GetData();

	m_sensors[s].sensorType             = ONI_SENSOR_IR;
	m_sensors[s].pSupportedVideoModes   = XN_NEW_ARR(OniVideoMode, nSupportedModes);
	XN_VALIDATE_ALLOC_PTR(m_sensors[s].pSupportedVideoModes);
	
	writeIndex = 0;
	for(XnUInt32 i=0; i < nSupportedModes; ++i)
	{
		m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat = ONI_PIXEL_FORMAT_GRAY16;
		m_sensors[s].pSupportedVideoModes[writeIndex].fps = pSupportedModes[i].nFPS;
		XnBool bOK = XnDDKGetXYFromResolution(
			(XnResolutions)pSupportedModes[i].nResolution,
			(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX,
			(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY
			);
		XN_ASSERT(bOK);
		XN_REFERENCE_VARIABLE(bOK);

		bool foundMatch = false;
		for (int j = 0; j < writeIndex; ++j)
		{
			if (m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat == m_sensors[s].pSupportedVideoModes[j].pixelFormat &&
				m_sensors[s].pSupportedVideoModes[writeIndex].fps == m_sensors[s].pSupportedVideoModes[j].fps &&
				m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX == m_sensors[s].pSupportedVideoModes[j].resolutionX &&
				m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY == m_sensors[s].pSupportedVideoModes[j].resolutionY)
			{
				// Already know this configuration
				foundMatch = true;
				break;
			}
		}
		if (!foundMatch)
		{
			++writeIndex;
		}
	}
	m_sensors[s].numSupportedVideoModes = writeIndex;
	m_numSensors = s+1;

	return XN_STATUS_OK;
}

Code Example #26
XnStatus XnServerSensorInvoker::SetStreamSharedMemory(SensorInvokerStream* pStream)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	// give shared memory a name (to make the name unique, we'll add process ID)
	XN_PROCESS_ID procID;
	xnOSGetCurrentProcessID(&procID);
	XnChar strSharedMemoryName[XN_FILE_MAX_PATH];
	sprintf(strSharedMemoryName, "%u_%s_%s", (XnUInt32)procID, m_sensor.GetUSBPath(), pStream->strType);
	nRetVal = pStream->pSharedMemoryName->UnsafeUpdateValue(strSharedMemoryName);
	XN_IS_STATUS_OK(nRetVal);

	XnUInt32 nBufferSize = 0;
	XnUInt32 nPixelSize = 0;

	if (strcmp(pStream->strType, XN_STREAM_TYPE_DEPTH) == 0)
	{
		// have space for depth and shift values
		nPixelSize = sizeof(XnDepthPixel) + sizeof(XnUInt16);
	}
	else if (strcmp(pStream->strType, XN_STREAM_TYPE_IMAGE) == 0)
	{
		// biggest pixel size is the RGB24
		nPixelSize = sizeof(XnRGB24Pixel);
	}
	else if (strcmp(pStream->strType, XN_STREAM_TYPE_IR) == 0)
	{
		// biggest pixel size is the RGB24
		nPixelSize = sizeof(XnIRPixel);
	}
	else
	{
		XN_ASSERT(FALSE);
		return XN_STATUS_ERROR;
	}

	// find out max resolution
	XnUInt32 nMaxNumPixels = 0;
	nRetVal = GetStreamMaxResolution(pStream, nMaxNumPixels);
	XN_IS_STATUS_OK(nRetVal);

	nBufferSize = (XnUInt32)(nMaxNumPixels * nPixelSize * m_numberOfBuffers.GetValue());

	// allocate shared memory
	nRetVal = xnOSCreateSharedMemoryEx(strSharedMemoryName, nBufferSize, XN_OS_FILE_READ | XN_OS_FILE_WRITE, m_allowOtherUsers.GetValue() == TRUE, &pStream->hSharedMemory);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = xnOSSharedMemoryGetAddress(pStream->hSharedMemory, (void**)&pStream->pSharedMemoryAddress);
	XN_IS_STATUS_OK(nRetVal);

	// Set buffer pool for this stream
	XnGeneralBuffer* aBuffers = XN_NEW_ARR(XnGeneralBuffer, m_numberOfBuffers.GetValue());
	XnUInt32 nSingleBufferSize = nBufferSize / m_numberOfBuffers.GetValue();
	for (XnUInt32 i = 0; i < m_numberOfBuffers.GetValue(); ++i)
	{
		aBuffers[i].pData = pStream->pSharedMemoryAddress + (i * nSingleBufferSize);
		aBuffers[i].nDataSize = nSingleBufferSize;
	}

	nRetVal = m_sensor.SetProperty(pStream->strType, XN_STREAM_PROPERTY_EXTERNAL_BUFFER_POOL, XnGeneralBufferPack(aBuffers, m_numberOfBuffers.GetValue() * sizeof(XnGeneralBuffer)));
	XN_DELETE_ARR(aBuffers);

	XN_IS_STATUS_OK(nRetVal);
	
	return (XN_STATUS_OK);
}
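
To make the sizing concrete: nBufferSize is nMaxNumPixels * nPixelSize * numberOfBuffers, and the loop then slices the shared-memory region into numberOfBuffers equal XnGeneralBuffer views of nSingleBufferSize bytes, i.e. one worst-case frame per buffer. For a depth stream, nPixelSize = sizeof(XnDepthPixel) + sizeof(XnUInt16) = 4 bytes, so at a hypothetical 640 x 480 maximum each slice would be 640 * 480 * 4 = 1,228,800 bytes.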
Code Example #27
File: OniRecorder.cpp  Project: Arkapravo/OpenNI2
void Recorder::onAttach(XnUInt32 nodeId, VideoStream* pStream)
{
    if (nodeId == 0 || pStream == NULL) 
    {
        return;
    }
    const OniSensorInfo* pSensorInfo = pStream->getSensorInfo();
    if (pSensorInfo == NULL)
    {
        return;
    }

    // Assume we'll be using uncompressed codec.
    XnUInt32 codecId = ONI_CODEC_UNCOMPRESSED;

    // Applicable for depth streams only.
    int maxDepth = XN_MAX_UINT16;

    OniVideoMode curVideoMode;
    int size = sizeof(OniVideoMode);
    pStream->getProperty(ONI_STREAM_PROPERTY_VIDEO_MODE, &curVideoMode, &size);

    // Guess codec type from video mode format.
    switch (curVideoMode.pixelFormat)
    {
    case ONI_PIXEL_FORMAT_DEPTH_100_UM:
    case ONI_PIXEL_FORMAT_DEPTH_1_MM:
        {
            size = int(sizeof(maxDepth));

            pStream->getProperty(
                    ONI_STREAM_PROPERTY_MAX_VALUE, &maxDepth, &size);

            m_streams[pStream].pCodec = XN_NEW(
                    Xn16zEmbTablesCodec, static_cast<XnUInt16>(maxDepth));

            codecId = ONI_CODEC_16Z_EMB_TABLES;
        }
        break;
    case ONI_PIXEL_FORMAT_RGB888:
        {
            if (m_streams[pStream].allowLossyCompression)
            {
                m_streams[pStream].pCodec = XN_NEW(
                        XnJpegCodec, 
                        /* bRGB = */ TRUE, 
                        curVideoMode.resolutionX,
                        curVideoMode.resolutionY);

                codecId = ONI_CODEC_JPEG;
            }
            else
            {
                m_streams[pStream].pCodec = XN_NEW(XnUncompressedCodec);
            }
        }
        break;
    default:
        m_streams[pStream].pCodec = XN_NEW(XnUncompressedCodec);
        break;
    }

    // If anything went wrong - fall back to uncompressed format. 
    if (XN_STATUS_OK != m_streams[pStream].pCodec->Init())
    {
        XN_DELETE(m_streams[pStream].pCodec);
        m_streams[pStream].pCodec = NULL;
        codecId = ONI_CODEC_UNCOMPRESSED;
    }
    
    Memento undoPoint(this);
    // save the position of this record so we can override it upon detaching
    m_streams[pStream].nodeAddedRecordPosition = undoPoint.GetPosition();

    EMIT(RECORD_NODE_ADDED(
            m_streams[pStream].nodeType = AsNodeType(pSensorInfo->sensorType),
            nodeId,
            m_streams[pStream].codecId = codecId,
            /* numberOfFrames    = */ XN_MAX_UINT32,
            /* minTimeStamp      = */ XN_UINT64_C(0),
            /* maxTimeStamp      = */ XN_MAX_UINT64,
            /* seekTablePosition = */ XN_UINT64_C(0)
        ))
    undoPoint.Reuse();

	// isGenerating (needed for OpenNI 1.x playback)
	EMIT(RECORD_INT_PROPERTY(
		nodeId,
		getLastPropertyRecordPos(nodeId, "xnIsGenerating", undoPoint.GetPosition()),
		"xnIsGenerating",
		TRUE
		));
	undoPoint.Reuse();

    // xnDeviceMaxDepth
    if (curVideoMode.pixelFormat == ONI_PIXEL_FORMAT_DEPTH_1_MM ||
        curVideoMode.pixelFormat == ONI_PIXEL_FORMAT_DEPTH_100_UM)
    {
        EMIT(RECORD_INT_PROPERTY(
                nodeId,
                getLastPropertyRecordPos(nodeId, "xnDeviceMaxDepth", undoPoint.GetPosition()),
                "xnDeviceMaxDepth",
                maxDepth
            ))
    }
    undoPoint.Reuse();

    // xnSupportedMapOutputModesCount
    EMIT(RECORD_INT_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnSupportedMapOutputModesCount", undoPoint.GetPosition()),
            "xnSupportedMapOutputModesCount",
            pSensorInfo->numSupportedVideoModes
        ))
    undoPoint.Reuse();

    // xnSupportedMapOutputModes
    VideoModeData* pVideoModes = XN_NEW_ARR(
            VideoModeData, pSensorInfo->numSupportedVideoModes);
    for (int i = 0; i < pSensorInfo->numSupportedVideoModes; ++i)
    {
        const OniVideoMode& videoMode = pSensorInfo->pSupportedVideoModes[i];
        pVideoModes[i].width  = videoMode.resolutionX;
        pVideoModes[i].height = videoMode.resolutionY;
        pVideoModes[i].fps    = videoMode.fps;
    }

    EMIT(RECORD_GENERAL_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnSupportedMapOutputModes", undoPoint.GetPosition()),
            "xnSupportedMapOutputModes",
            pVideoModes,
            sizeof(*pVideoModes) * pSensorInfo->numSupportedVideoModes
        ))
    undoPoint.Reuse();

    // xnSupportedPixelFormats
    XnSupportedPixelFormats supportedPixelFormats;
    fillXnSupportedPixelFormats(supportedPixelFormats, curVideoMode.pixelFormat);
    EMIT(RECORD_GENERAL_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnSupportedPixelFormats", undoPoint.GetPosition()),
            "xnSupportedPixelFormats",
            &supportedPixelFormats,
            sizeof(supportedPixelFormats)
        ))
    undoPoint.Reuse();

    // xnMapOutputMode
    VideoModeData curVMD;
    curVMD.width  = curVideoMode.resolutionX;
    curVMD.height = curVideoMode.resolutionY;
    curVMD.fps    = curVideoMode.fps;
    EMIT(RECORD_GENERAL_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnMapOutputMode", undoPoint.GetPosition()),
            "xnMapOutputMode",
            &curVMD,
            sizeof(curVMD)
        ))
    undoPoint.Reuse();

    // xnPixelFormat
    EMIT(RECORD_INT_PROPERTY(
            nodeId,
            getLastPropertyRecordPos(nodeId, "xnPixelFormat", undoPoint.GetPosition()),
            "xnPixelFormat",
            toXnPixelFormat(curVideoMode.pixelFormat)
        ))
    undoPoint.Reuse();

	EMIT(RECORD_INT_PROPERTY(
		nodeId,
		getLastPropertyRecordPos(nodeId, "oniPixelFormat", undoPoint.GetPosition()),
		"oniPixelFormat",
		curVideoMode.pixelFormat
		))
	undoPoint.Reuse();

    XN_DELETE_ARR(pVideoModes);

    size = sizeof(XnFloat);
	float vdummy, hdummy;
    if ( pStream->getProperty(ONI_STREAM_PROPERTY_HORIZONTAL_FOV, &hdummy, &size) == ONI_STATUS_OK &&
         pStream->getProperty(ONI_STREAM_PROPERTY_VERTICAL_FOV,   &vdummy, &size) == ONI_STATUS_OK )
    {
		// xnFOV
		struct XnFieldOfView
		{
			/** Horizontal Field Of View, in radians. */
			XnDouble fHFOV;
			/** Vertical Field Of View, in radians. */
			XnDouble fVFOV;
		} fov = {hdummy, vdummy};

        EMIT(RECORD_GENERAL_PROPERTY(
                nodeId,
                getLastPropertyRecordPos(nodeId, "xnFOV", undoPoint.GetPosition()),
                "xnFOV",
                &fov,
                sizeof(fov)
            ))
        undoPoint.Reuse();
    }

	// xnCropping
	struct XnCropping
	{
		/** TRUE if cropping is turned on, FALSE otherwise. */
		XnBool bEnabled;
		/** Offset in the X-axis, in pixels. */
		XnUInt16 nXOffset;
		/** Offset in the Y-axis, in pixels. */
		XnUInt16 nYOffset;
		/** Number of pixels in the X-axis. */
		XnUInt16 nXSize;
		/** Number of pixels in the Y-axis. */
		XnUInt16 nYSize;
	} xncropping = {0};
	OniCropping cropping;
	size = sizeof(OniCropping);
	if (pStream->getProperty(ONI_STREAM_PROPERTY_CROPPING, &cropping, &size) == ONI_STATUS_OK)
	{
		// we support cropping capability
		EMIT(RECORD_INT_PROPERTY(
			nodeId,
			getLastPropertyRecordPos(nodeId, "Cropping", undoPoint.GetPosition()),
			"Cropping",
			TRUE
			));

		undoPoint.Reuse();

		xncropping.bEnabled = cropping.enabled;
		xncropping.nXOffset = (XnUInt16)cropping.originX;
		xncropping.nYOffset = (XnUInt16)cropping.originY;
		xncropping.nXSize = (XnUInt16)cropping.width;
		xncropping.nYSize = (XnUInt16)cropping.height;

		EMIT(RECORD_GENERAL_PROPERTY(
			nodeId,
			getLastPropertyRecordPos(nodeId, "xnCropping", undoPoint.GetPosition()),
			"xnCropping",
			&xncropping,
			sizeof(xncropping)
			))

		undoPoint.Reuse();
	}

	OniBool bMirror = FALSE;
	size = sizeof(bMirror);
	if (pStream->getProperty(ONI_STREAM_PROPERTY_MIRRORING, &bMirror, &size) == ONI_STATUS_OK)
	{
		// we support mirroring capability
		EMIT(RECORD_INT_PROPERTY(
			nodeId,
			getLastPropertyRecordPos(nodeId, "Mirror", undoPoint.GetPosition()),
			"Mirror",
			TRUE
			));

		undoPoint.Reuse();

		// and now tell the mirror state
		EMIT(RECORD_INT_PROPERTY(
			nodeId,
			getLastPropertyRecordPos(nodeId, "xnMirror", undoPoint.GetPosition()),
			"xnMirror",
			bMirror
			))

		undoPoint.Reuse();
	}

	m_propertyPriority = ms_priorityHigh;
    pStream->notifyAllProperties();
	m_propertyPriority = ms_priorityNormal;
    undoPoint.Release();
}
Code Example #28
File: MockMapGenerator.cpp  Project: 3david/OpenNI
XnStatus MockMapGenerator::SetGeneralProperty(const XnChar* strName, XnUInt32 nBufferSize, const void* pBuffer)
{
	XN_VALIDATE_INPUT_PTR(strName);
	XN_VALIDATE_INPUT_PTR(pBuffer);
	XnStatus nRetVal = XN_STATUS_OK;
	if (strcmp(strName, XN_PROP_MAP_OUTPUT_MODE) == 0)
	{
		if (nBufferSize != sizeof(m_mapOutputMode))
		{
			XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_MAP_OUTPUT_MODE - buffer size is incorrect");
		}
		const XnMapOutputMode* pOutputMode = (const XnMapOutputMode*)pBuffer;
		nRetVal = SetMapOutputMode(*pOutputMode);
		XN_IS_STATUS_OK(nRetVal);
	}
	else if (strcmp(strName, XN_PROP_SUPPORTED_MAP_OUTPUT_MODES) == 0)
	{
		if (m_bSupportedMapOutputModesCountReceived)		
		{
			m_bSupportedMapOutputModesCountReceived = FALSE; //For next time
			if (nBufferSize != m_nSupportedMapOutputModesCount * sizeof(XnMapOutputMode))
			{
				XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_SUPPORTED_MAP_OUTPUT_MODES - buffer size is incorrect");
			}

			XN_DELETE_ARR(m_pSupportedMapOutputModes);
			m_pSupportedMapOutputModes = XN_NEW_ARR(XnMapOutputMode, m_nSupportedMapOutputModesCount);
			XN_VALIDATE_ALLOC_PTR(m_pSupportedMapOutputModes);
			xnOSMemCopy(m_pSupportedMapOutputModes, pBuffer, nBufferSize);
		}
		else
		{
			XN_ASSERT(FALSE);
			XN_LOG_ERROR_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Got XN_PROP_SUPPORTED_MAP_OUTPUT_MODES without XN_PROP_SUPPORTED_MAP_OUTPUT_MODES_COUNT before it");
		}
	}
	else if (strcmp(strName, XN_PROP_CROPPING) == 0)
	{
		if (nBufferSize != sizeof(m_cropping))
		{
			XN_LOG_ERROR_RETURN(XN_STATUS_INVALID_BUFFER_SIZE, XN_MASK_OPEN_NI, "Cannot set XN_PROP_CROPPING - buffer size is incorrect");
		}
		const XnCropping* pCropping = (const XnCropping*)pBuffer;
		nRetVal = SetCropping(*pCropping);
		XN_IS_STATUS_OK(nRetVal);
	}
	else if (strcmp(strName, XN_PROP_NEWDATA) == 0)
	{
		XnUInt32 nExpectedSize = GetExpectedBufferSize();
		if (nBufferSize != nExpectedSize)
		{
			xnLogWarning(XN_MASK_OPEN_NI, "%s: Got new data with illegal buffer size (%u) - ignoring.", m_strName, nBufferSize);
		}
		else
		{
			//Send it to be handled by our base class
			nRetVal = MockGenerator::SetGeneralProperty(strName, nBufferSize, pBuffer);
			XN_IS_STATUS_OK(nRetVal);
		}
	}
	else
	{
		nRetVal = MockGenerator::SetGeneralProperty(strName, nBufferSize, pBuffer);
		XN_IS_STATUS_OK(nRetVal);
	}

	return XN_STATUS_OK;
}