XnVMessageListener::XnVMessageListener(const XnChar* strName) :
	m_bInternalThreadAlive(false),
	m_bInternalThreadKill(false),
	m_bThreadProtectionQueueMode(false)
{
	xnLogVerbose(XNV_NITE_MASK_CREATE, "Listener %s [0x%08x]: Create", strName, this);

	xnOSCreateCriticalSection(&m_hListenerCS);

	SetCurrentThread();
	m_bThreadProtectionQueueMode = false;

	m_strListenerName = (XnChar*)xnOSCalloc(strlen(strName)+1, sizeof(XnChar));
	if (m_strListenerName != NULL)
	{
		xnOSStrCopy(m_strListenerName, strName, strlen(strName)+1);
	}

	m_pMessageQueue = XN_NEW(XnVMessageQueue);
	m_pMessageQueue->Init();

	m_pUpdateCBs = XN_NEW(XnVMessageSpecificEvent);
	m_pActivateCBs = XN_NEW(XnVEvent);
	m_pDeactivateCBs = XN_NEW(XnVEvent);
} // XnVMessageListener::XnVMessageListener
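Each XN_NEW and xnOSCalloc above has a matching release responsibility. A minimal teardown sketch, assuming the usual OpenNI cleanup primitives (XN_DELETE, xnOSFree, xnOSCloseCriticalSection) and ignoring the internal-thread shutdown the real destructor would also need:

// Sketch only -- not the actual NiTE destructor; it merely mirrors the
// allocations made in the constructor above.
XnVMessageListener::~XnVMessageListener()
{
	XN_DELETE(m_pDeactivateCBs);
	XN_DELETE(m_pActivateCBs);
	XN_DELETE(m_pUpdateCBs);
	XN_DELETE(m_pMessageQueue);

	xnOSFree(m_strListenerName);              // allocated with xnOSCalloc
	xnOSCloseCriticalSection(&m_hListenerCS); // created in the constructor
}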
Example #2
OniStatus Context::recorderOpen(const char* fileName, OniRecorderHandle* pRecorder)
{
    // Validate parameters.
    if (NULL == pRecorder || NULL == fileName)
    {
        return ONI_STATUS_BAD_PARAMETER;
    }
    // Allocate the handle.
    *pRecorder = XN_NEW(_OniRecorder);
    if (NULL == *pRecorder)
    {
        return ONI_STATUS_ERROR;
    }
    // Create the recorder itself.
    if (NULL == ((*pRecorder)->pRecorder = XN_NEW(Recorder, m_frameManager, m_errorLogger, *pRecorder)))
    {
        XN_DELETE(*pRecorder);
        return ONI_STATUS_ERROR;
    }
    // Try to initialize the recorder, and add it to the list of known
    // recorders upon successful initialization.
    OniStatus status = (*pRecorder)->pRecorder->initialize(fileName);
    if (ONI_STATUS_OK == status) 
    {
        m_recorders.AddLast((*pRecorder)->pRecorder);
    }
    else
    {
        // Initialization failed: release the recorder and the handle so the
        // caller is not left holding a half-constructed object.
        XN_DELETE((*pRecorder)->pRecorder);
        XN_DELETE(*pRecorder);
        *pRecorder = NULL;
    }
    return status;
}
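A hypothetical caller sketch, exercising only the status/handle contract of recorderOpen above (the surrounding function and the "context" parameter are assumptions, not part of the OpenNI2 API):

// Caller sketch (hypothetical): open a recorder and bail out on failure.
OniStatus startRecording(Context& context)
{
	OniRecorderHandle hRecorder = NULL;
	OniStatus rc = context.recorderOpen("capture.oni", &hRecorder);
	if (rc != ONI_STATUS_OK)
	{
		return rc; // recorderOpen has already released everything it allocated
	}
	// ... record frames, then tear the recorder down through the matching
	// close path (not shown here).
	return ONI_STATUS_OK;
}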
Example #3
oni::driver::StreamBase* XnOniDevice::createStream(OniSensorType sensorType)
{
	XnOniStream* pStream;

	if (sensorType == ONI_SENSOR_DEPTH)
	{
		pStream = XN_NEW(XnOniDepthStream, &m_sensor, this);
	}
	else if (sensorType == ONI_SENSOR_COLOR)
	{
		pStream = XN_NEW(XnOniColorStream, &m_sensor, this);
	}
	else if (sensorType == ONI_SENSOR_IR)
	{
		pStream = XN_NEW(XnOniIRStream, &m_sensor, this);
	}
	else
	{
		m_driverServices.errorLoggerAppend("XnOniDevice: Can't create a stream of type %d", sensorType);
		return NULL;
	}

	XnStatus nRetVal = pStream->Init();
	if (nRetVal != XN_STATUS_OK)
	{
		m_driverServices.errorLoggerAppend("XnOniDevice: Can't initialize stream of type %d: %s", sensorType, xnGetStatusString(nRetVal));
		XN_DELETE(pStream);
		return NULL;
	}

	return pStream;
}
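The teardown side is symmetric. A sketch, assuming the driver API's destroyStream hook is where the XN_NEW'ed stream gets released (the real implementation may do extra bookkeeping):

void XnOniDevice::destroyStream(oni::driver::StreamBase* pStream)
{
	// Release the stream object allocated in createStream() above.
	XN_DELETE(pStream);
}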
Example #4
XnVFlowRouter::XnVFlowRouter(const XnChar* strName) :
	XnVMessageListener(strName),
	m_pActive(NULL)
{
	m_pProperties = XN_NEW(XnStringsHash);
	XnVMultipleHands* pHands = XN_NEW(XnVMultipleHands);
	m_pProperties->Set(FR_PROPERTY_HANDS, pHands);

} // XnVFlowRouter::XnVFlowRouter
Example #5
	oni::driver::StreamBase* createStream(OniSensorType sensorType)
	{
		if (sensorType == ONI_SENSOR_COLOR) {
			return XN_NEW(KinectV2ColorStream, kinect_);
		}
		else if (sensorType == ONI_SENSOR_DEPTH) {
			return XN_NEW(KinectV2DepthStream, kinect_);
		}

		return NULL;
	}
Example #6
// Normal ctors
XnVSlider1D::XnVSlider1D(XnVAxis eAxis, XnBool bDraggable, const XnPoint3D& ptInitialPosition,
						 XnFloat fSliderLength, XnFloat fInitialValue, XnFloat fMinOutput, XnFloat fMaxOutput,
						 XnFloat fOffAxisDetectionAngle, XnFloat fOffAxisDetectionMinimumVelocity) :
	m_pPointBuffer(NULL)
{
	m_pValueChangeCBs = XN_NEW(XnVFloatSpecificEvent);
	m_pOffAxisMovementCBs = XN_NEW(XnVDirectionSpecificEvent);

	m_nOffAxisDetectionTime = ms_nDefaultTimeForOffAxisDetection;

	Initialize(eAxis, bDraggable, ptInitialPosition, fSliderLength, fInitialValue, fMinOutput, fMaxOutput,
		fOffAxisDetectionAngle, fOffAxisDetectionMinimumVelocity);
} // XnVSlider1D::XnVSlider1D
Example #7
VideoStream::VideoStream(void* streamHandle, const OniSensorInfo* pSensorInfo, Device& device, const DriverHandler& libraryHandler, xnl::ErrorLogger& errorLogger) :
	m_errorLogger(errorLogger),
	m_pSensorInfo(NULL),
	m_running(true),
	m_device(device),
	m_driverHandler(libraryHandler),
	m_streamHandle(streamHandle),
	m_pContextNewFrameEvent(NULL),
	m_started(FALSE)
{
	xnOSCreateEvent(&m_newFrameInternalEvent, false);
	xnOSCreateEvent(&m_newFrameInternalEventForFrameHolder, false);
	xnOSCreateThread(newFrameThread, this, &m_newFrameThread);
	
	m_pSensorInfo = XN_NEW(OniSensorInfo);
	m_pSensorInfo->sensorType = pSensorInfo->sensorType;
	m_pSensorInfo->numSupportedVideoModes = pSensorInfo->numSupportedVideoModes;
	m_pSensorInfo->pSupportedVideoModes = XN_NEW_ARR(OniVideoMode, m_pSensorInfo->numSupportedVideoModes);
	xnOSMemCopy(m_pSensorInfo->pSupportedVideoModes, pSensorInfo->pSupportedVideoModes, sizeof(OniVideoMode)*m_pSensorInfo->numSupportedVideoModes);

	m_driverHandler.streamSetNewFrameCallback(m_streamHandle, stream_NewFrame, this);
    m_driverHandler.streamSetPropertyChangedCallback(m_streamHandle, stream_PropertyChanged, this);

	refreshWorldConversionCache();
}
Example #8
	virtual OniStatus initialize(
		oni::driver::DeviceConnectedCallback connectedCallback,
		oni::driver::DeviceDisconnectedCallback disconnectedCallback,
		oni::driver::DeviceStateChangedCallback deviceStateChangedCallback,
		void* pCookie)
	{
		oni::driver::DriverBase::initialize(connectedCallback, disconnectedCallback, deviceStateChangedCallback, pCookie);

        // Open Kinect v2
        auto hr = ::GetDefaultKinectSensor( &kinect_ );
        if ( FAILED( hr ) ) {
			return ONI_STATUS_NO_DEVICE;
        }
        
        hr = kinect_->Open();
        if (FAILED(hr)) {
            std::cerr << "IKinectSensor::Open() failed." << std::endl;
			return ONI_STATUS_ERROR;
        }

		// Create device info
		OniDeviceInfo* pInfo = XN_NEW(OniDeviceInfo);
		xnOSStrCopy(pInfo->vendor, "Microsoft", ONI_MAX_STR);
		xnOSStrCopy(pInfo->name, "Kinect V2 Developer Preview", ONI_MAX_STR);
		xnOSStrCopy(pInfo->uri, "Kinect V2", ONI_MAX_STR);

		// internal connect device
		deviceConnected(pInfo);
		deviceStateChanged(pInfo, 0);

		return ONI_STATUS_OK;
	}
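The sensor opened here also needs a matching shutdown. A hypothetical sketch (the class name KinectV2Driver and the destructor itself are assumptions; only IKinectSensor::Close() and the COM Release() are standard Kinect v2 calls):

	// Hypothetical teardown: close the sensor opened in initialize() and
	// drop the COM reference.
	virtual ~KinectV2Driver()
	{
		if (kinect_ != nullptr)
		{
			kinect_->Close();
			kinect_->Release();
			kinect_ = nullptr;
		}
	}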
Example #9
 static DumpData& GetInstance()
 {
     // NOTE: this instance will never be destroyed (because some static object destructor might write/close dumps, and
     // destruction order is not-deterministic).
     static DumpData* pSingleton = XN_NEW(DumpData);
     return *pSingleton;
 }
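The same deliberately-leaked singleton idiom reappears in Example #17 below. Stripped of the OpenNI types it looks like this (generic sketch, names hypothetical):

// Allocate on first use and never delete, so the instance is still alive
// when static destructors run during process shutdown.
class DumpRegistry
{
public:
	static DumpRegistry& GetInstance()
	{
		static DumpRegistry* pSingleton = new DumpRegistry(); // leaked on purpose
		return *pSingleton;
	}

private:
	DumpRegistry() {}
};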
Example #10
XnStatus XnSensorClient::CreateStreamModule(const XnChar* StreamType, const XnChar* StreamName, XnDeviceModuleHolder** ppStreamHolder)
{
	XnStatus nRetVal = XN_STATUS_OK;

	XnSensorClientStream* pStream;
	if (strcmp(StreamType, XN_STREAM_TYPE_AUDIO) == 0)
	{
		XN_VALIDATE_NEW(pStream, XnSensorClientAudioStream, this, StreamType, StreamName);
	}
	else
	{
		XN_VALIDATE_NEW(pStream, XnSensorClientFrameStream, this, StreamType, StreamName);
	}

	XnStreamReaderStreamHolder* pHolder = XN_NEW(XnStreamReaderStreamHolder, pStream);
	if (pHolder == NULL)
	{
		XN_DELETE(pStream);
		return XN_STATUS_ALLOC_FAILED;
	}

	*ppStreamHolder = pHolder;

	return (XN_STATUS_OK);
}
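XN_VALIDATE_NEW is why pStream needs no explicit NULL check here, while the plain XN_NEW for the holder does. A behavioural sketch of the macro (not its exact definition in the XnOS headers):

// Sketch: allocate, and return XN_STATUS_ALLOC_FAILED from the enclosing
// function if the allocation came back NULL.
#define XN_VALIDATE_NEW_SKETCH(ptr, type, ...)	\
	do											\
	{											\
		(ptr) = XN_NEW(type, __VA_ARGS__);		\
		if ((ptr) == NULL)						\
		{										\
			return XN_STATUS_ALLOC_FAILED;		\
		}										\
	} while (0)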
Example #11
XnNodeManager::XnNodeManager()
{
    xnOSCreateCriticalSection(&m_hCriticalSection);
    //TODO: Handle critical section creation failure

    m_nCurrentAvailability = 0;
    m_nCurrentCapacity = 0;
    m_nCurrentOccupancy = 0;
    m_pFirstAvailable = NULL;

    m_eInitializationState = XN_NM_INIT_STATE_CREATE_INTERNAL_LIST;
    m_pAllNodes = XN_NEW(XnList, this);
    if (m_pAllNodes == NULL)
    {
        // OZOZ: Allocation failed in ctor...
    }

    m_eInitializationState = XN_NM_INIT_STATE_CREATE_FIRST_LINK;
    XnStatus rc = Resize(nInitialSize);
    if (rc != XN_STATUS_OK)
    {
        // OZOZ: Allocation failed in ctor...
    }

    m_eInitializationState = XN_NM_INIT_STATE_DONE;
}
Example #12
	oni::driver::StreamBase* createStream(OniSensorType sensorType)
	{
		if (sensorType == ONI_SENSOR_DEPTH)
		{
			OzDepthStream* pDepth = XN_NEW(OzDepthStream);
			return pDepth;
		}
		if (sensorType == ONI_SENSOR_COLOR)
		{
			OzImageStream* pImage = XN_NEW(OzImageStream);
			return pImage;
		}

		m_driverServices.errorLoggerAppend("OzDevice: Can't create a stream of type %d", sensorType);
		return NULL;
	}
Example #13
oni::driver::DeviceBase* XnOniDriver::deviceOpen(const char* uri, const char* mode)
{
	XnOniDevice* pDevice = NULL;

	// if a device was already opened for this uri, fail - a uri can only be opened once
	if (m_devices.Get(uri, pDevice) == XN_STATUS_OK)
	{
		getServices().errorLoggerAppend("Device is already open.");
		return NULL;
	}

	pDevice = XN_NEW(XnOniDevice, uri, getServices(), this);
	XnStatus nRetVal = pDevice->Init(mode);
	if (nRetVal != XN_STATUS_OK)
	{
		getServices().errorLoggerAppend("Could not open \"%s\": %s", uri, xnGetStatusString(nRetVal));
		XN_DELETE(pDevice); // don't leak the device when initialization fails
		return NULL;
	}

	// Register to error state property changed.
	XnCallbackHandle handle;
	nRetVal = pDevice->GetSensor()->RegisterToPropertyChange(XN_MODULE_NAME_DEVICE, 
																XN_MODULE_PROPERTY_ERROR_STATE, 
																OnDevicePropertyChanged, pDevice, handle);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE(pDevice);
		return NULL;
	}

	// Add the device and return it.
	m_devices[uri] = pDevice;
	return pDevice;
}
Example #14
OniStatus PlayerDriver::tryDevice(const char* strUri)
{
	static XnPlayerInputStreamInterface inputInterface = 
	{
		FileOpen,
		FileRead,
		NULL,
		NULL,
		FileClose,
		NULL,
		NULL,
	};

	// Store the file path.
	m_filePath = strUri;

	XnStatus rc = PlayerNode::ValidateStream(this, &inputInterface);
	if (rc == XN_STATUS_OK)
	{
		OniDeviceInfo* pInfo = XN_NEW(OniDeviceInfo);
		xnOSMemSet(pInfo, 0, sizeof(*pInfo));
		xnOSStrCopy(pInfo->uri,    strUri,               ONI_MAX_STR);
		xnOSStrCopy(pInfo->vendor, kVendorString.Data(), ONI_MAX_STR);
		xnOSStrCopy(pInfo->name,   kDeviceName.Data(),   ONI_MAX_STR);
		deviceConnected(pInfo);
		return ONI_STATUS_OK;
	}

	return DriverBase::tryDevice(strUri);
}
Example #15
OniStatus Context::streamDestroy(VideoStream* pStream)
{
	OniStatus rc = ONI_STATUS_OK;

	if (pStream == NULL)
	{
		return ONI_STATUS_OK;
	}

	// Make sure the stream is stopped.
	pStream->stop();

	m_cs.Lock();

	// Remove the stream from the streams list.
	m_streams.Remove(pStream);

	m_cs.Unlock();

	// Lock stream's frame holder.
	FrameHolder* pFrameHolder = pStream->getFrameHolder();
	pFrameHolder->setEnabled(FALSE);
	pFrameHolder->lock();
	pFrameHolder->clear();

	// Get the frame holder's streams.
	int numStreams = pFrameHolder->getNumStreams();
	xnl::Array<VideoStream*> pStreamList(numStreams);
	pStreamList.SetSize(numStreams);
	pFrameHolder->getStreams(pStreamList.GetData(), &numStreams);

	// Change holder to all the streams (allocate new StreamFrameHolder).
	for (int i = 0; i < numStreams; ++i)
	{
		if (pStreamList[i] != pStream)
		{
			// Allocate new frame holder.
			StreamFrameHolder* pStreamFrameHolder = XN_NEW(StreamFrameHolder, m_frameManager, pStreamList[i]);
			if (pStreamFrameHolder == NULL)
			{
				rc = ONI_STATUS_ERROR;
				continue;
			}

			// Replace the holder in the stream.
			pStreamList[i]->setFrameHolder(pStreamFrameHolder);
		}
	}

	pFrameHolder->unlock();

	// Delete the stream object and handle.
	XN_DELETE(pStream);

	// Delete the frame holder.
	XN_DELETE(pFrameHolder);

	return rc;
}
Example #16
	virtual oni::driver::DeviceBase* deviceOpen(const char* uri, const char* /*mode*/)
	{
		if (!kinect_) {
			return NULL;
		}

		return XN_NEW(KinectV2Device, getServices(), kinect_);
	}
Example #17
XnNodeManager* XnNodeManager::GetInstance()
{
    // NOTE: we *never* free the NodeManager instance, as it should always exist. Global variables dtors
    // might still use lists and hashs to do their work.
    // Instead, we let the OS free its memory and resources.
    static XnNodeManager* pNM = XN_NEW(XnNodeManager);
    return pNM;
}
Example #18
oni::driver::StreamBase* DS325Device::createStream(OniSensorType sensorType) {
	if(sensorType == ONI_SENSOR_DEPTH) {
		DepthDS325Stream* pDepth = XN_NEW(DepthDS325Stream);
		return pDepth;
	}
	if(sensorType == ONI_SENSOR_IR) {
		IRDS325Stream* pIR = XN_NEW(IRDS325Stream);
		return pIR;
	}
	if(sensorType == ONI_SENSOR_COLOR) {
		ImageDS325Stream* pImage = XN_NEW(ImageDS325Stream);
		return pImage;
	}

	m_driverServices.errorLoggerAppend("DS325Device: Can't create a stream of type %d", sensorType);
	return NULL;
}
Example #19
XnVPointDenoiser::XnVPointDenoiser(XnFloat fDistanceThreshold, const XnChar* strName) :
	XnVPointFilter(strName),
	m_fDistanceThreshold(fDistanceThreshold),
	m_fCloseRatio(ms_fDefaultCloseRatio),
	m_fFarRatio(ms_fDefaultFarRatio)
{
	m_pActivePoints = XN_NEW(XnVIntLocalHash);
}
Example #20
XnStatus XnSensor::CreateStreamModule(const XnChar* StreamType, const XnChar* StreamName, XnDeviceModuleHolder** ppStreamHolder)
{
	XnStatus nRetVal = XN_STATUS_OK;

	// make sure reading from streams is turned on
	if (!m_ReadData.GetValue())
	{
		nRetVal = m_ReadData.SetValue(TRUE);
		XN_IS_STATUS_OK(nRetVal);
	}

	XnDeviceStream* pStream;
	XnSensorStreamHelper* pHelper;

	// create stream
	if (strcmp(StreamType, XN_STREAM_TYPE_DEPTH) == 0)
	{
		XnSensorDepthStream* pDepthStream;
		XN_VALIDATE_NEW(pDepthStream, XnSensorDepthStream, StreamName, &m_Objects);
		pStream = pDepthStream;
		pHelper = pDepthStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_IMAGE) == 0)
	{
		XnSensorImageStream* pImageStream;
		XN_VALIDATE_NEW(pImageStream, XnSensorImageStream, StreamName, &m_Objects);
		pStream = pImageStream;
		pHelper = pImageStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_IR) == 0)
	{
		XnSensorIRStream* pIRStream;
		XN_VALIDATE_NEW(pIRStream, XnSensorIRStream, StreamName, &m_Objects);
		pStream = pIRStream;
		pHelper = pIRStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_AUDIO) == 0)
	{
		if (!m_Firmware.GetInfo()->bAudioSupported)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_UNSUPPORTED_STREAM, XN_MASK_DEVICE_SENSOR, "Audio is not supported by this FW!");
		}

		// TODO: use the allow other users property when constructing the audio stream
		XnSensorAudioStream* pAudioStream;
		XN_VALIDATE_NEW(pAudioStream, XnSensorAudioStream, GetUSBPath(), StreamName, &m_Objects, FALSE);
		pStream = pAudioStream;
		pHelper = pAudioStream->GetHelper();
	}
	else
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_UNSUPPORTED_STREAM, XN_MASK_DEVICE_SENSOR, "Unsupported stream type: %s", StreamType);
	}

	*ppStreamHolder = XN_NEW(XnSensorStreamHolder, pStream, pHelper);

	return (XN_STATUS_OK);
}
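Unlike Example #10, the final holder allocation here is returned unchecked. A defensive variant of the last two lines (a sketch; the code above hands back the XN_NEW result directly):

	// Sketch: mirror Example #10 so a failed holder allocation does not
	// leak the stream that was just created.
	XnSensorStreamHolder* pHolder = XN_NEW(XnSensorStreamHolder, pStream, pHelper);
	if (pHolder == NULL)
	{
		XN_DELETE(pStream);
		return XN_STATUS_ALLOC_FAILED;
	}
	*ppStreamHolder = pHolder;

	return (XN_STATUS_OK);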
Example #21
XnDumpFile* xnDumpFileOpenImpl(const XnChar* strDumpName, XnBool bForce, XnBool bSessionDump, const XnChar* strNameFormat, va_list args)
{
    XnStatus nRetVal = XN_STATUS_OK;

    DumpData& dumpData = DumpData::GetInstance();

    // check if there are writers
    if (dumpData.writers.IsEmpty())
    {
        return NULL;
    }

    if (!bForce)
    {
        if (!xnLogIsDumpMaskEnabled(strDumpName))
        {
            return NULL;
        }
    }

    // format file name
    XnChar strFileName[XN_FILE_MAX_PATH];
    XnUInt32 nChars;
    nRetVal = xnOSStrFormatV(strFileName, XN_FILE_MAX_PATH, &nChars, strNameFormat, args);
    if (nRetVal != XN_STATUS_OK)
    {
        XN_ASSERT(FALSE);
        return NULL;
    }

    // create a handle that will hold all handles to all writers
    XnDumpFile* pFile = XN_NEW(XnDumpFile);

    // try to add writers
    for (XnDumpWriters::Iterator it = dumpData.writers.Begin(); it != dumpData.writers.End(); ++it)
    {
        XnDumpWriterFile writerFile;
        writerFile.pWriter = *it;
        writerFile.hFile = writerFile.pWriter->OpenFile(writerFile.pWriter->pCookie, strDumpName, bSessionDump, strFileName);
        XN_ASSERT(writerFile.hFile.pInternal != NULL);
        if (writerFile.hFile.pInternal != NULL)
        {
            nRetVal = pFile->m_writersFiles.AddLast(writerFile);
            XN_ASSERT(nRetVal == XN_STATUS_OK);
        }
    }

    // check if any writer succeeded
    if (pFile->m_writersFiles.IsEmpty())
    {
        // no file. Release memory
        XN_DELETE(pFile);
        return NULL;
    }

    // return the file pointer
    return pFile;
}
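A hedged usage sketch of the dump facility built on top of this function. xnDumpFileOpen, xnDumpFileWriteString and xnDumpFileClose are assumed to be the public wrappers, and a NULL XnDumpFile* is assumed to turn the later calls into no-ops; exact signatures may differ between OpenNI versions:

void DumpSomething()
{
	// Open a named dump (the name format is just an example), write one
	// line, and close it again.
	XnDumpFile* pDump = xnDumpFileOpen("MyModule", "my_module_%u.txt", 1);
	xnDumpFileWriteString(pDump, "something interesting happened\n");
	xnDumpFileClose(pDump);
}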
Example #22
XnVObjectID xnvFlowRouter_Create(const XnChar* strName)
{
	XnVFlowRouter* pFlowRouter = XN_NEW(XnVFlowRouter, strName);
	XnVObjectID id = pFlowRouter;//AllocateID();
	g_FlowRouters.Set(id, pFlowRouter);
	xnvMessageListener_AddToHash(id, pFlowRouter);

	return id;
}
Example #23
XnVSlider1D::XnVSlider1D(XnVAxis eAxis, const XnPoint3D& ptInitialPosition, XnPoint3D ptMinPoint, XnPoint3D ptMaxPoint,
						 XnFloat fMinOutput, XnFloat fMaxOutput, XnFloat fOffAxisDetectionAngle,
						 XnFloat fOffAxisDetectionMinimumVelocity) :
	m_pPointBuffer(NULL)
{
	m_pValueChangeCBs = XN_NEW(XnVFloatSpecificEvent);
	m_pOffAxisMovementCBs = XN_NEW(XnVDirectionSpecificEvent);

	XnFloat fSliderLength;
	XnFloat fInitialValue;
	m_nOffAxisDetectionTime = ms_nDefaultTimeForOffAxisDetection;

	InitializeFromPoint(ptInitialPosition, ptMinPoint, ptMaxPoint, eAxis, fSliderLength, fInitialValue);

	// All absolute sliders are not draggable!
	Initialize(eAxis, false, ptInitialPosition, fSliderLength, fInitialValue, fMinOutput, fMaxOutput,
		fOffAxisDetectionAngle, fOffAxisDetectionMinimumVelocity);
} // XnVSlider1D::XnVSlider1D
Example #24
OniStatus Context::createStream(OniDeviceHandle device, OniSensorType sensorType, OniStreamHandle* pStream)
{

	// Create the stream.
	Device* pDevice = device->pDevice;
	VideoStream* pMyStream = pDevice->createStream(sensorType);
	if (pMyStream == NULL)
	{
		m_errorLogger.Append("Context: Couldn't create stream from device:%08x, source: %d", device, sensorType);
		return ONI_STATUS_ERROR;
	}

	pMyStream->setNewFrameCallback(newFrameCallback, this);

	// Create stream frame holder and connect it to the stream.
	StreamFrameHolder* pFrameHolder = XN_NEW(StreamFrameHolder, m_frameManager, pMyStream);
	if (pFrameHolder == NULL)
	{
		m_errorLogger.Append("Context: Couldn't create stream frame holder from device:%08x, source: %d", device, sensorType);
		XN_DELETE(pMyStream);
		return ONI_STATUS_ERROR;
	}
	pMyStream->setFrameHolder(pFrameHolder);

	// Create handle object.
	_OniStream* pStreamHandle = XN_NEW(_OniStream);
	if (pStreamHandle == NULL)
	{
		m_errorLogger.Append("Couldn't allocate memory for StreamHandle");
		XN_DELETE(pFrameHolder);
		pFrameHolder = NULL;
		XN_DELETE(pMyStream);
		pMyStream = NULL;
		return ONI_STATUS_ERROR;
	}
	*pStream = pStreamHandle;
	pStreamHandle->pStream = pMyStream;

	m_cs.Lock();
	m_streams.AddLast(pMyStream);
	m_cs.Unlock();

	return ONI_STATUS_OK;
}
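The handle returned to the caller is only a thin wrapper. A sketch of the relationship the assignments above imply (the exact struct definition is an assumption):

// Ownership sketch (hedged): the opaque stream handle wraps the VideoStream,
// which in turn owns the StreamFrameHolder attached to it.
struct _OniStream
{
	VideoStream* pStream; // the implementation object created above
};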
Example #25
OniStatus Context::enableFrameSyncEx(VideoStream** pStreams, int numStreams, DeviceDriver* pDeviceDriver, OniFrameSyncHandle* pFrameSyncHandle)
{
	// Make sure the device driver is valid.
	if (pDeviceDriver == NULL)
	{
		return ONI_STATUS_ERROR;
	}

	// Create the new frame sync group (it will link all the streams).
	SyncedStreamsFrameHolder* pSyncedStreamsFrameHolder = XN_NEW(SyncedStreamsFrameHolder, 
																	m_frameManager, pStreams, numStreams);
	XN_VALIDATE_PTR(pSyncedStreamsFrameHolder, ONI_STATUS_ERROR);

	// Configure frame-sync group in driver.
	void* driverHandle = pDeviceDriver->enableFrameSync(pStreams, numStreams);
	XN_VALIDATE_PTR(driverHandle, ONI_STATUS_ERROR);

	// Return the frame sync handle.
	*pFrameSyncHandle = XN_NEW(_OniFrameSync);
	if (*pFrameSyncHandle == NULL)
	{
		m_errorLogger.Append("Couldn't allocate memory for FrameSyncHandle");
		return ONI_STATUS_ERROR;
	}
	(*pFrameSyncHandle)->pSyncedStreamsFrameHolder = pSyncedStreamsFrameHolder;
	(*pFrameSyncHandle)->pDeviceDriver = pDeviceDriver;
	(*pFrameSyncHandle)->pFrameSyncHandle = driverHandle;

	// Update the frame holders of all the streams.
	pSyncedStreamsFrameHolder->lock();
	for (int j = 0; j < numStreams; ++j)
	{
		FrameHolder* pOldFrameHolder = pStreams[j]->getFrameHolder();
		pOldFrameHolder->lock();
		pOldFrameHolder->setStreamEnabled(pStreams[j], FALSE);
		pStreams[j]->setFrameHolder(pSyncedStreamsFrameHolder);
		pOldFrameHolder->unlock();
		XN_DELETE(pOldFrameHolder);
	}
	pSyncedStreamsFrameHolder->unlock();

	return ONI_STATUS_OK;
}
Example #26
XnVPushDetector::XnVPushDetector(const XnChar* strName) :
	XnVPointControl(strName),
	m_bPushDetected(false),
	m_nPushImmediateOffset(0),
	m_nPushImmediateDuration(240),
	m_nPushPreviousOffset(240),	// ms
	m_nPushPreviousDuration(150),
	m_fPushImmediateMinVelocity(0.33f),	// (m/s)
	m_fPushPreviousMinVelocity(0.17f),	// (m/s)
	m_fPushMinAngleImmediateAndPrevious(20.0f),
	m_fPushMaxAngleFromZ(30.0f),
	m_fStableMaxVelocity(0.13f),	// Speed (in (m/s)) under which to consider stable
	m_nStableDuration(360) // ms
{
	m_pPoints = XN_NEW(XnVPointBuffer);

	m_pPushCBs = XN_NEW(XnVFloatFloatSpecificEvent);
	m_pStabilizedCBs = XN_NEW(XnVFloatSpecificEvent);
} // XnVPushDetector::XnVPushDetector
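A hedged usage sketch for the events wired up here; RegisterPush and its callback signature follow the NiTE 1.x convention of (cookie, callback) and may differ between releases:

#include <stdio.h>

// Hedged sketch: print the velocity/angle the push event reports.
void XN_CALLBACK_TYPE OnPush(XnFloat fVelocity, XnFloat fAngle, void* pCookie)
{
	printf("Push detected: %.2f m/s at %.1f degrees\n", fVelocity, fAngle);
}

void AttachPushHandler(XnVPushDetector& pushDetector)
{
	pushDetector.RegisterPush(NULL, OnPush);
}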
Example #27
XnStatus XnSharedMemoryBufferPool::AllocateBuffers()
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	if (m_nBufferSize > m_nMaxBufferSize)
	{
		return XN_STATUS_ALLOC_FAILED;
	}

	if (m_pSharedMemoryAddress != NULL)
	{
		// already allocated. nothing to do here
		return (XN_STATUS_OK);
	}

	// first time. allocate shared memory
	XnUInt32 nTotalSize = m_nMaxBufferSize * m_nBufferCount;
	nRetVal = xnOSCreateSharedMemory(m_strName, nTotalSize, XN_OS_FILE_READ | XN_OS_FILE_WRITE, &m_hSharedMemory);
	XN_IS_STATUS_OK(nRetVal);

	void* pAddress;
	nRetVal = xnOSSharedMemoryGetAddress(m_hSharedMemory, &pAddress);
	if (nRetVal != XN_STATUS_OK)
	{
		xnOSCloseSharedMemory(m_hSharedMemory);
		m_hSharedMemory = NULL;
		return (nRetVal);
	}

	m_pSharedMemoryAddress = (XnUChar*)pAddress;

	// now allocate buffers
	for (XnUInt32 i = 0; i < m_nBufferCount; ++i)
	{
		XnBufferInPool* pBuffer = XN_NEW(XnBufferInPool);
		if (pBuffer == NULL)
		{
			Free();
			return (XN_STATUS_ALLOC_FAILED);
		}

		pBuffer->m_nID = i;

		pBuffer->SetExternalBuffer(m_pSharedMemoryAddress + i*m_nMaxBufferSize, m_nMaxBufferSize);

		xnDumpWriteString(Dump(), "Allocated buffer %u with size %u\n", i, m_nMaxBufferSize);

		// add it to free list
		m_AllBuffers.AddLast(pBuffer);
		m_FreeBuffers.AddLast(pBuffer);
	}

	return (XN_STATUS_OK);
}
Example #28
LinkMsgParser* LinkInputStream::CreateLinkMsgParser()
{
	if (m_outputFormat == XN_FORMAT_PASS_THROUGH_RAW)
	{
		return XN_NEW(LinkMsgParser);
	}
	else
	{
		xnLogError(XN_MASK_LINK, "Unknown output format: %d", m_outputFormat);
		XN_ASSERT(FALSE);
		return NULL;
	}
}
Example #29
void XN_CALLBACK_TYPE PlayerDriver::EOFReached(void* pCookie, const char *strUri)
{
	PlayerDriver* pThis = (PlayerDriver*)pCookie;

	OniDeviceInfo* pInfo = XN_NEW(OniDeviceInfo);
	xnOSMemSet(pInfo, 0, sizeof(*pInfo));
	xnOSStrCopy(pInfo->uri,    strUri,               ONI_MAX_STR);
	xnOSStrCopy(pInfo->vendor, kVendorString.Data(), ONI_MAX_STR);
	xnOSStrCopy(pInfo->name,   kDeviceName.Data(),   ONI_MAX_STR);

	pThis->deviceStateChanged(pInfo, ONI_DEVICE_STATE_EOF);
}
Example #30
XnStatus XnSensor::CreateStreamModule(const XnChar* StreamType, const XnChar* StreamName, XnDeviceModuleHolder** ppStreamHolder)
{
	XnStatus nRetVal = XN_STATUS_OK;

	// make sure reading from streams is turned on
	if (!m_ReadData.GetValue())
	{
		nRetVal = m_ReadData.SetValue(TRUE);
		XN_IS_STATUS_OK(nRetVal);
	}

	XnDeviceStream* pStream;
	XnSensorStreamHelper* pHelper;

	// create stream
	if (strcmp(StreamType, XN_STREAM_TYPE_DEPTH) == 0)
	{
		XnSensorDepthStream* pDepthStream;
		XN_VALIDATE_NEW(pDepthStream, XnSensorDepthStream, GetUSBPath(), StreamName, &m_Objects, m_NumberOfBuffers.GetValue());
		pStream = pDepthStream;
		pHelper = pDepthStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_IMAGE) == 0)
	{
		XnSensorImageStream* pImageStream;
		XN_VALIDATE_NEW(pImageStream, XnSensorImageStream, GetUSBPath(), StreamName, &m_Objects, m_NumberOfBuffers.GetValue());
		pStream = pImageStream;
		pHelper = pImageStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_IR) == 0)
	{
		XnSensorIRStream* pIRStream;
		XN_VALIDATE_NEW(pIRStream, XnSensorIRStream, GetUSBPath(), StreamName, &m_Objects, m_NumberOfBuffers.GetValue());
		pStream = pIRStream;
		pHelper = pIRStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_AUDIO) == 0)
	{
		XnSensorAudioStream* pAudioStream;
		XN_VALIDATE_NEW(pAudioStream, XnSensorAudioStream, GetUSBPath(), StreamName, &m_Objects);
		pStream = pAudioStream;
		pHelper = pAudioStream->GetHelper();
	}
	else
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_UNSUPPORTED_STREAM, XN_MASK_DEVICE_SENSOR, "Unsupported stream type: %s", StreamType);
	}

	*ppStreamHolder = XN_NEW(XnSensorStreamHolder, pStream, pHelper);

	return (XN_STATUS_OK);
}