Example #1
1
XnStatus PlayerNode::HandleDataIndexRecord(DataIndexRecordHeader record, XnBool bReadPayload)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	XN_VALIDATE_INPUT_PTR(m_pNodeNotifications);
	nRetVal = record.Decode();
	XN_IS_STATUS_OK(nRetVal);
	DEBUG_LOG_RECORD(record, "DataIndex");

	XN_ASSERT(record.GetNodeID() != INVALID_NODE_ID);
	PlayerNodeInfo* pPlayerNodeInfo = GetPlayerNodeInfo(record.GetNodeID());
	XN_VALIDATE_PTR(pPlayerNodeInfo, XN_STATUS_CORRUPT_FILE);

	XnUInt32 nRecordTotalSize = record.GetSize() + record.GetPayloadSize();
	if (nRecordTotalSize > RECORD_MAX_SIZE)
	{
		XN_ASSERT(FALSE);
		XN_LOG_ERROR_RETURN(XN_STATUS_INTERNAL_BUFFER_TOO_SMALL, XN_MASK_OPEN_NI, "Record size %u is larger than player internal buffer", nRecordTotalSize);
	}

	if (bReadPayload)
	{
		// make sure node exists
		if (!pPlayerNodeInfo->bValid)
		{
			XN_ASSERT(FALSE);
			return XN_STATUS_CORRUPT_FILE;
		}

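		// the seek table should contain one DataIndexEntry per frame, plus one extra
		// (frame IDs appear to be 1-based, so entry 0 goes unused)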
		if (record.GetPayloadSize() != (pPlayerNodeInfo->nFrames+1) * sizeof(DataIndexEntry))
		{
			XN_ASSERT(FALSE);
			XN_LOG_WARNING_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Seek table has %u entries, but node has %u frames!", record.GetPayloadSize() / sizeof(DataIndexEntry), pPlayerNodeInfo->nFrames);
		}

		// allocate our data index
		pPlayerNodeInfo->pDataIndex = (DataIndexEntry*)xnOSCalloc(pPlayerNodeInfo->nFrames+1, sizeof(DataIndexEntry));
		XN_VALIDATE_ALLOC_PTR(pPlayerNodeInfo->pDataIndex);

		//Now read the actual data
		XnUInt32 nBytesRead = 0;
		nRetVal = Read(pPlayerNodeInfo->pDataIndex, record.GetPayloadSize(), nBytesRead);
		XN_IS_STATUS_OK(nRetVal);
		if (nBytesRead < record.GetPayloadSize())
		{
			XN_LOG_ERROR_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Not enough bytes read");
		}
	}
	else
	{
		//Just skip the data
		nRetVal = SkipRecordPayload(record);
		XN_IS_STATUS_OK(nRetVal);
	}

	return XN_STATUS_OK;
}
Example #2
0
XnStatus XnSensorClient::ReadInitialState(XnPropertySet* pSet)
{
	XnStatus nRetVal = XN_STATUS_OK;

	// first message should be either the initial state or an error message (if the server failed to open the sensor)
	XnPackedDataType nType;
	nRetVal = GetDataPacker()->ReadNextObject(&nType);
	XN_IS_STATUS_OK(nRetVal);

	if (nType == XN_SENSOR_SERVER_MESSAGE_GENERAL_OP_RESPOND)
	{
		// check the error code
		XnUInt32 nDataSize = sizeof(m_LastReply);
		nRetVal = GetDataPacker()->ReadCustomData(nType, &m_LastReply, &nDataSize);
		XN_IS_STATUS_OK(nRetVal);

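		// the server reported a failure; log it and propagate the server's status code to the caller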
		XN_LOG_WARNING_RETURN(m_LastReply.nRetVal, XN_MASK_SENSOR_SERVER, "Server returned an error: %s", xnGetStatusString(m_LastReply.nRetVal));
	}
	else if (nType == XN_PACKED_PROPERTY_SET)
	{
		nRetVal = GetDataPacker()->ReadPropertySet(pSet);
		XN_IS_STATUS_OK(nRetVal);
	}
	else
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_PROTOCOL_UNKNOWN_ERROR, XN_MASK_DDK, "Unexpected message: %d (should start with a GENERAL_OP_RESPOND or PROPERTY_SET)", nType);
	}

	return (XN_STATUS_OK);
}
Example #3
0
XnStatus XnSensorClient::WaitForReply(XnSensorServerCustomMessages ExpectedMessage)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	// wait for event
	nRetVal = xnOSWaitEvent(m_hReplyEvent, XN_SENSOR_REPLY_TIMEOUT);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_LOG_WARNING_RETURN(nRetVal, XN_MASK_SENSOR_SERVER, "Timeout when waiting for reply from sensor server!");
	}

	// reset it
	nRetVal = xnOSResetEvent(m_hReplyEvent);
	XN_IS_STATUS_OK(nRetVal);

	// check error code
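	// (m_LastReply is presumably filled by the client's receive thread before the reply event is signaled)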
	if (m_LastReply.nRetVal != XN_STATUS_OK)
	{
		XN_LOG_WARNING_RETURN(m_LastReply.nRetVal, XN_MASK_SENSOR_SERVER, "Server returned an error: %s", xnGetStatusString(m_LastReply.nRetVal));
	}

	if (m_LastReply.Type != ExpectedMessage)
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_SENSOR_SERVER, "Sensor server protocol error - invalid reply type!");
	}

	return (XN_STATUS_OK);
}
Example #4
0
XnStatus XnSensorIRStream::SetResolution(XnResolutions nResolution)
{
	XnStatus nRetVal = XN_STATUS_OK;

	switch (nResolution)
	{
	case XN_RESOLUTION_QVGA:
	case XN_RESOLUTION_VGA:
		break;
	case XN_RESOLUTION_SXGA:
		if (m_Helper.GetFirmwareVersion() < XN_SENSOR_FW_VER_5_1)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_IO_INVALID_STREAM_IMAGE_RESOLUTION, XN_MASK_DEVICE_SENSOR, "IR resolution is not supported by this firmware!");
		}
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Unsupported IR resolution: %d", nResolution);
	}

	nRetVal = m_Helper.BeforeSettingFirmwareParam(ResolutionProperty(), nResolution);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = XnIRStream::SetResolution(nResolution);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = m_Helper.AfterSettingFirmwareParam(ResolutionProperty());
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
XnStatus XnSensorDepthStream::SetInputFormat(XnIODepthFormats nInputFormat)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	switch (nInputFormat)
	{
	case XN_IO_DEPTH_FORMAT_COMPRESSED_PS:
	case XN_IO_DEPTH_FORMAT_UNCOMPRESSED_16_BIT:
		break;
	case XN_IO_DEPTH_FORMAT_UNCOMPRESSED_11_BIT:
		if (m_Helper.GetFirmwareVersion() < XN_SENSOR_FW_VER_4_0)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_UNSUPPORTED_MODE, XN_MASK_DEVICE_SENSOR, "11-bit depth is not supported on this sensor!");
		}
		break;
	case XN_IO_DEPTH_FORMAT_UNCOMPRESSED_12_BIT:
		if (m_Helper.GetFirmwareVersion() < XN_SENSOR_FW_VER_4_0)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_UNSUPPORTED_MODE, XN_MASK_DEVICE_SENSOR, "12-bit depth is not supported on this sensor!");
		}
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Unknown depth input format: %d", nInputFormat);
	}
	
	nRetVal = m_Helper.SimpleSetFirmwareParam(m_InputFormat, (XnUInt16)nInputFormat);
	XN_IS_STATUS_OK(nRetVal);
	
	return (XN_STATUS_OK);
}
Example #6
0
XnStatus XnSensorFirmwareParams::SetImageResolution(XnUInt64 nValue)
{
	XnStatus nRetVal = XN_STATUS_OK;

	if (m_pInfo->nFWVer < XN_SENSOR_FW_VER_5_4)
	{
		switch (nValue)
		{
		case XN_RESOLUTION_QVGA:
		case XN_RESOLUTION_VGA:
			break;
		case XN_RESOLUTION_SXGA:
			if (m_pInfo->nFWVer < XN_SENSOR_FW_VER_5_3)
			{
				XN_LOG_WARNING_RETURN(XN_STATUS_IO_INVALID_STREAM_IMAGE_RESOLUTION, XN_MASK_DEVICE_SENSOR, "Image resolution is not supported by this firmware!");
			}
			break;
		case XN_RESOLUTION_UXGA:
			if (m_pInfo->nFWVer < XN_SENSOR_FW_VER_5_1)
			{
				XN_LOG_WARNING_RETURN(XN_STATUS_IO_INVALID_STREAM_IMAGE_RESOLUTION, XN_MASK_DEVICE_SENSOR, "Image resolution is not supported by this firmware!");
			}
			break;
		default:
			XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Unsupported image resolution: %llu", nValue);
		}
	}

	nRetVal = SetFirmwareParam(&m_ImageResolution, nValue);
	XN_IS_STATUS_OK(nRetVal);
	
	return (XN_STATUS_OK);
}
Example #7
0
XnStatus XnSensor::CreateStreamModule(const XnChar* StreamType, const XnChar* StreamName, XnDeviceModuleHolder** ppStreamHolder)
{
	XnStatus nRetVal = XN_STATUS_OK;

	// make sure reading from streams is turned on
	if (!m_ReadData.GetValue())
	{
		nRetVal = m_ReadData.SetValue(TRUE);
		XN_IS_STATUS_OK(nRetVal);
	}

	XnDeviceStream* pStream;
	XnSensorStreamHelper* pHelper;

	// create stream
	if (strcmp(StreamType, XN_STREAM_TYPE_DEPTH) == 0)
	{
		XnSensorDepthStream* pDepthStream;
		XN_VALIDATE_NEW(pDepthStream, XnSensorDepthStream, StreamName, &m_Objects);
		pStream = pDepthStream;
		pHelper = pDepthStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_IMAGE) == 0)
	{
		XnSensorImageStream* pImageStream;
		XN_VALIDATE_NEW(pImageStream, XnSensorImageStream, StreamName, &m_Objects);
		pStream = pImageStream;
		pHelper = pImageStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_IR) == 0)
	{
		XnSensorIRStream* pIRStream;
		XN_VALIDATE_NEW(pIRStream, XnSensorIRStream, StreamName, &m_Objects);
		pStream = pIRStream;
		pHelper = pIRStream->GetHelper();
	}
	else if (strcmp(StreamType, XN_STREAM_TYPE_AUDIO) == 0)
	{
		if (!m_Firmware.GetInfo()->bAudioSupported)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_UNSUPPORTED_STREAM, XN_MASK_DEVICE_SENSOR, "Audio is not supported by this FW!");
		}

		// TODO: use the allow other users property when constructing the audio stream
		XnSensorAudioStream* pAudioStream;
		XN_VALIDATE_NEW(pAudioStream, XnSensorAudioStream, GetUSBPath(), StreamName, &m_Objects, FALSE);
		pStream = pAudioStream;
		pHelper = pAudioStream->GetHelper();
	}
	else
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_UNSUPPORTED_STREAM, XN_MASK_DEVICE_SENSOR, "Unsupported stream type: %s", StreamType);
	}

	*ppStreamHolder = XN_NEW(XnSensorStreamHolder, pStream, pHelper);

	return (XN_STATUS_OK);
}
Example #8
0
XnStatus XnLinuxSysVNamedEvent::CreateNamed(const XnChar* strName)
{
	// translate event name to key file name
	XnUInt32 nBytesWritten;
	xnOSStrFormat(m_csSemFileName, XN_FILE_MAX_PATH, &nBytesWritten, "/tmp/XnCore.Event.%s.key", strName);

	// create the file (must exist for ftok)
	m_hSemFile = open(m_csSemFileName, O_CREAT | O_RDONLY, S_IRWXU | S_IRWXG | S_IRWXO);
	if (-1 == m_hSemFile)
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_OS_EVENT_CREATION_FAILED, XN_MASK_OS, "Create named event: failed to create key file (%d)", errno);
	}

	// create the key
	key_t key = ftok(m_csSemFileName, 1);
	if (-1 == key)
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_OS_EVENT_CREATION_FAILED, XN_MASK_OS, "Create named event: ftok failed (%d)", errno);
	}

	// Try to create the semaphore set: one semaphore is used as a reference count,
	// one for the signaled state, and one for the manual-reset flag
	m_hSem = semget(key, XN_EVENT_NUM_OF_SEMS, IPC_CREAT | S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH);
	if (-1 == m_hSem)
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_OS_EVENT_CREATION_FAILED, XN_MASK_OS, "Create named event: failed to create semaphore (%d)", errno);
	}

	// if no one is using it so far (this is a newly created event), reset it
	if (0 == semctl(m_hSem, XN_EVENT_SEM_REF_COUNT, GETVAL))
	{
		// set it to the non-signaled state
		semun val;
		val.val = 0;
		if (0 != semctl(m_hSem, XN_EVENT_SEM_SIGNALED, SETVAL, val))
		{
			xnLogWarning(XN_MASK_OS, "Create named event: semctl for signaled failed (%d)", errno);
			semctl(m_hSem, 0, IPC_RMID);
			return XN_STATUS_OS_EVENT_CREATION_FAILED;
		}

		// set its manual reset value
		val.val = m_bManualReset;
		if (0 != semctl(m_hSem, XN_EVENT_SEM_MANUAL_RESET, SETVAL, val))
		{
			xnLogWarning(XN_MASK_OS, "Create named event: semctl for manual reset failed (%d)", errno);
			semctl(m_hSem, 0, IPC_RMID);
			return XN_STATUS_OS_EVENT_CREATION_FAILED;
		}
	}

	// adding 1 to reference count
	struct sembuf op;
	op.sem_op = 1;
	op.sem_num = XN_EVENT_SEM_REF_COUNT;
	op.sem_flg = SEM_UNDO;
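	// SEM_UNDO makes the kernel roll this increment back automatically if the process dies without releasing the event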
	semop(m_hSem, &op, 1);

	m_bManualReset = semctl(m_hSem, XN_EVENT_SEM_MANUAL_RESET, GETVAL);

	return (XN_STATUS_OK);
}
Example #9
0
XnStatus XnSensorImageStream::ValidateMode()
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	// validity checks
	XnIOImageFormats nInputFormat = (XnIOImageFormats)m_InputFormat.GetValue();
	XnOutputFormats nOutputFormat = GetOutputFormat();
	XnResolutions nResolution = GetResolution();
	XnUInt32 nFPS = GetFPS();

	// check that input format matches output format
	switch (nOutputFormat)
	{
	case XN_OUTPUT_FORMAT_RGB24:
		if (nInputFormat != XN_IO_IMAGE_FORMAT_YUV422 &&
			nInputFormat != XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUV422 &&
			nInputFormat != XN_IO_IMAGE_FORMAT_BAYER)
		{
			// --avin mod--
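			// the RGB24 input-format check below was disabled as part of this mod, so any input format is accepted for RGB24 output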
			//XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Input format %d cannot be converted to RGB24!", nInputFormat);
		}
		break;
	case XN_OUTPUT_FORMAT_YUV422:
		if (nInputFormat != XN_IO_IMAGE_FORMAT_YUV422 &&
			nInputFormat != XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUV422)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Input format %d cannot be converted to YUV422!", nInputFormat);
		}
		break;
	case XN_OUTPUT_FORMAT_JPEG:
		if (nInputFormat != XN_IO_IMAGE_FORMAT_JPEG)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Input format %d cannot be converted to JPEG!", nInputFormat);
		}
		break;
	case XN_OUTPUT_FORMAT_GRAYSCALE8:
		if (nInputFormat != XN_IO_IMAGE_FORMAT_UNCOMPRESSED_GRAY8 &&
			nInputFormat != XN_IO_IMAGE_FORMAT_UNCOMPRESSED_BAYER &&
			nInputFormat != XN_IO_IMAGE_FORMAT_BAYER)
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Input format %d cannot be converted to Gray8!", nInputFormat);
		}
		break;
	default:
		// we shouldn't have reached here; there's a check in SetOutputFormat.
		XN_ASSERT(FALSE);
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Unsupported image output format: %d!", nOutputFormat);
	}

	// now check that mode exists
	XnCmosPreset preset = { (XnUInt16)nInputFormat, (XnUInt16)nResolution, (XnUInt16)nFPS };
	nRetVal = ValidateSupportedMode(preset);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #10
0
XnStatus XnSensorImageStream::CreateDataProcessor(XnDataProcessor** ppProcessor)
{
	XnStreamProcessor* pNew;

	switch (m_InputFormat.GetValue())
	{
	case XN_IO_IMAGE_FORMAT_BAYER:
		XN_VALIDATE_NEW_AND_INIT(pNew, XnBayerImageProcessor, this, &m_Helper);
		break;
	case XN_IO_IMAGE_FORMAT_YUV422:
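		// XN_IO_IMAGE_FORMAT_YUV422 goes to the PS-compressed processor, which suggests the firmware delivers this format using the PrimeSense compression scheme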
		XN_VALIDATE_NEW_AND_INIT(pNew, XnPSCompressedImageProcessor, this, &m_Helper);
		break;
	case XN_IO_IMAGE_FORMAT_JPEG:
		if (GetOutputFormat() == XN_OUTPUT_FORMAT_JPEG)
		{
			XN_VALIDATE_NEW_AND_INIT(pNew, XnJpegImageProcessor, this, &m_Helper);
		}
		else if (GetOutputFormat() == XN_OUTPUT_FORMAT_RGB24)
		{
			XN_VALIDATE_NEW_AND_INIT(pNew, XnJpegToRGBImageProcessor, this, &m_Helper);
		}
		else
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "invalid output format %d!", GetOutputFormat());
		}
		break;
	case XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUV422:
		if (GetOutputFormat() == XN_OUTPUT_FORMAT_YUV422)
		{
			XN_VALIDATE_NEW_AND_INIT(pNew, XnUncompressedYUVImageProcessor, this, &m_Helper);
		}
		else if (GetOutputFormat() == XN_OUTPUT_FORMAT_RGB24)
		{
			XN_VALIDATE_NEW_AND_INIT(pNew, XnUncompressedYUVtoRGBImageProcessor, this, &m_Helper);
		}
		else
		{
			XN_LOG_WARNING_RETURN(XN_STATUS_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "invalid output format %d!", GetOutputFormat());
		}
		break;
	case XN_IO_IMAGE_FORMAT_UNCOMPRESSED_BAYER:
		XN_VALIDATE_NEW_AND_INIT(pNew, XnUncompressedBayerProcessor, this, &m_Helper);
		break;
	default:
		return XN_STATUS_IO_INVALID_STREAM_IMAGE_FORMAT;
	}

	*ppProcessor = pNew;

	return XN_STATUS_OK;
}
Example #11
0
XnStatus XnSensorDepthStream::DecideFirmwareRegistration(XnBool bRegistration, XnProcessingType registrationType, XnResolutions nRes)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	// start with request
	XnBool bFirmwareRegistration = bRegistration;

	if (bFirmwareRegistration)
	{
		// old chip (PS1000) does not support registration for VGA
		XnBool bHardwareRegistrationSupported = 
			m_Helper.GetPrivateData()->ChipInfo.nChipVer != XN_SENSOR_CHIP_VER_PS1000 || nRes == XN_RESOLUTION_QVGA;

		switch (registrationType)
		{
		case XN_PROCESSING_HARDWARE:
			if (!bHardwareRegistrationSupported)
			{
				XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Sensor does not support hardware registration for current configuration!");
			}
			break;
		case XN_PROCESSING_SOFTWARE:
			if (GetResolution() != XN_RESOLUTION_VGA)
			{
				XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Software registration is only supported for VGA resolution!");
			}
			bFirmwareRegistration = FALSE;
			break;
		case XN_PROCESSING_DONT_CARE:
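			// prefer hardware registration when the chip supports it for this configuration; otherwise fall back to software registration below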
			bFirmwareRegistration = bHardwareRegistrationSupported;
			break;
		default:
			XN_LOG_ERROR_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Unknown registration type: %d", registrationType);
		}
	}

	if (bRegistration && !bFirmwareRegistration)
	{
		// make sure software registration is initialized
		if (!m_Registration.IsInitialized())
		{
			nRetVal = m_Registration.Init(m_Helper.GetPrivateData(), this, GetDepthToShiftTable());
			XN_IS_STATUS_OK(nRetVal);
		}
	}

	nRetVal = m_Helper.SimpleSetFirmwareParam(m_FirmwareRegistration, (XnUInt16)bFirmwareRegistration);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
XnStatus XnDeviceModuleHolder::UnsafeSetProperty(const XnProperty* pRequest, XnProperty* pProp)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	if (pRequest->GetType() != pProp->GetType())
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_PROPERTY_BAD_TYPE, XN_MASK_DDK, "Property '%s' has the wrong type!", pRequest->GetName());
	}

	switch (pRequest->GetType())
	{
	case XN_PROPERTY_TYPE_INTEGER:
		{
			XnActualIntProperty* pActualRequest = (XnActualIntProperty*)pRequest;
			XnActualIntProperty* pActualProp = (XnActualIntProperty*)pProp;
			nRetVal = pActualProp->UnsafeUpdateValue(pActualRequest->GetValue());
			XN_IS_STATUS_OK(nRetVal);
			break;
		}
	case XN_PROPERTY_TYPE_REAL:
		{
			XnActualRealProperty* pActualRequest = (XnActualRealProperty*)pRequest;
			XnActualRealProperty* pActualProp = (XnActualRealProperty*)pProp;
			nRetVal = pActualProp->UnsafeUpdateValue(pActualRequest->GetValue());
			XN_IS_STATUS_OK(nRetVal);
			break;
		}
	case XN_PROPERTY_TYPE_STRING:
		{
			XnActualStringProperty* pActualRequest = (XnActualStringProperty*)pRequest;
			XnActualStringProperty* pActualProp = (XnActualStringProperty*)pProp;
			nRetVal = pActualProp->UnsafeUpdateValue(pActualRequest->GetValue());
			XN_IS_STATUS_OK(nRetVal);
			break;
		}
	case XN_PROPERTY_TYPE_GENERAL:
		{
			XnActualGeneralProperty* pActualRequest = (XnActualGeneralProperty*)pRequest;
			XnActualGeneralProperty* pActualProp = (XnActualGeneralProperty*)pProp;
			nRetVal = pActualProp->UnsafeUpdateValue(pActualRequest->GetValue());
			XN_IS_STATUS_OK(nRetVal);
			break;
		}
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_DDK, "Unknown property type: %d\n", pRequest->GetType());
	} // switch
	
	return (XN_STATUS_OK);
}
Example #13
0
XnStatus XnServerSession::HandleGetStringProperty()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnSensorServerMessageGetPropertyRequest request;
	XnUInt32 nDataSize = sizeof(request);
	nRetVal = m_privateIncomingPacker.ReadCustomData(XN_SENSOR_SERVER_MESSAGE_GET_STRING_PROPERTY, &request, &nDataSize);
	XN_IS_STATUS_OK(nRetVal);
	if (nDataSize != sizeof(request))
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_SENSOR_SERVER, "Sensor server protocol error - invalid size!");
	}

	// get
	XnChar strValue[XN_DEVICE_MAX_STRING_LENGTH];
	XnStatus nActionResult = GetStringPropertyImpl(request.strModuleName, request.strPropertyName, strValue);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to get property '%s.%s': %s", m_nID, request.strModuleName, request.strPropertyName, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GET_STRING_PROPERTY, nActionResult, sizeof(strValue), strValue);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #14
0
XnStatus XnSensorImageStream::SetOutputFormat(XnOutputFormats nOutputFormat)
{
	XnStatus nRetVal = XN_STATUS_OK;

	switch (nOutputFormat)
	{
	case XN_OUTPUT_FORMAT_GRAYSCALE8:
	case XN_OUTPUT_FORMAT_YUV422:
	case XN_OUTPUT_FORMAT_RGB24:
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Unsupported image output format: %d", nOutputFormat);
	}

	nRetVal = m_Helper.BeforeSettingDataProcessorProperty();
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = XnImageStream::SetOutputFormat(nOutputFormat);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = m_Helper.AfterSettingDataProcessorProperty();
	XN_IS_STATUS_OK(nRetVal);
	
	return (XN_STATUS_OK);
}
Example #15
0
XnStatus XnDeviceSensor::Init(const XnDeviceConfig* pDeviceConfig)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	XN_VALIDATE_INPUT_PTR(pDeviceConfig);

	XnDeviceBase* pActualDevice = NULL;

	switch (pDeviceConfig->SharingMode)
	{
	case XN_DEVICE_EXCLUSIVE:
		XN_VALIDATE_NEW(pActualDevice, XnSensor);
		break;
	case XN_DEVICE_SHARED:
#if (XN_PLATFORM != XN_PLATFORM_WIN32)
		XN_LOG_WARNING_RETURN(XN_STATUS_IO_DEVICE_INVALID_SHARING, XN_MASK_DEVICE_SENSOR, "Sensor sharing is only supported under win32!");
#endif
		XN_VALIDATE_NEW(pActualDevice, XnSensorClient);
		break;
	default:
		return XN_STATUS_IO_DEVICE_INVALID_SHARING;
	}

	// init actual device
	nRetVal = pActualDevice->Init(pDeviceConfig);
	XN_IS_STATUS_OK(nRetVal);

	ReplaceActualDevice(pActualDevice);

	return (XN_STATUS_OK);
}
Example #16
0
XnStatus PlayerNode::HandleEndRecord(EndRecord record)
{
	XN_VALIDATE_INPUT_PTR(m_pNodeNotifications);
	XnStatus nRetVal = record.Decode();
	XN_IS_STATUS_OK(nRetVal);
	DEBUG_LOG_RECORD(record, "End");

	if (!m_bDataBegun)
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "File does not contain any data!");
	}

	nRetVal = m_eofReachedEvent.Raise();
	XN_IS_STATUS_OK(nRetVal);

	if (m_bRepeat)
	{
		nRetVal = Rewind();
		XN_IS_STATUS_OK(nRetVal);
	}
	else
	{
		m_bEOF = TRUE;
		CloseStream();
	}

	return XN_STATUS_OK;	
}
Example #17
0
XnStatus XnSensorImageStream::SetResolution(XnResolutions nResolution)
{
	XnStatus nRetVal = XN_STATUS_OK;

	switch (nResolution)
	{
	case XN_RESOLUTION_QVGA:
	case XN_RESOLUTION_VGA:
	case XN_RESOLUTION_SXGA:
	case XN_RESOLUTION_UXGA:
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Unknown resolution: %u", nResolution);
	}

	nRetVal = m_Helper.BeforeSettingFirmwareParam(ResolutionProperty(), nResolution);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = XnImageStream::SetResolution(nResolution);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = m_Helper.AfterSettingFirmwareParam(ResolutionProperty());
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #18
0
XnUInt32 XnSensorImageStream::CalculateExpectedSize()
{
	XnUInt32 nExpectedImageBufferSize = GetXRes() * GetYRes();

	// when cropping is turned on, the actual image size is smaller
	const XnCropping* pCropping = GetCropping();
	if (pCropping->bEnabled)
	{
		nExpectedImageBufferSize = pCropping->nXSize * pCropping->nYSize;
	}

	switch (m_InputFormat.GetValue())
	{
	case XN_IO_IMAGE_FORMAT_YUV422:
	case XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUV422:
		// in YUV each pixel is represented in 2 bytes (actually 2 pixels are represented by 4 bytes)
		nExpectedImageBufferSize *= 2;
		break;
	case XN_IO_IMAGE_FORMAT_BAYER:
		// each pixel is one byte.
		break;
	case XN_IO_IMAGE_FORMAT_JPEG:
		// JPEG frames are decoded to RGB, so expect 3 bytes per pixel
		nExpectedImageBufferSize *= 3;
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_DEVICE_SENSOR, "Can't calculate expected size for input format %d", m_InputFormat.GetValue());
	}

	return nExpectedImageBufferSize;
}
Example #19
0
XnStatus XnFileDevice::ReadFileVersion()
{
    XnStatus nRetVal = XN_STATUS_OK;

    // read magic from file
    XnChar csFileMagic[XN_DEVICE_FILE_MAGIC_LEN];
    nRetVal = m_pInputStream->ReadData((XnUChar*)csFileMagic, XN_DEVICE_FILE_MAGIC_LEN);
    XN_IS_STATUS_OK(nRetVal);

    if (strncmp(csFileMagic, XN_DEVICE_FILE_MAGIC_V4, XN_DEVICE_FILE_MAGIC_LEN) == 0)
    {
        m_nFileVersion = 4;
    }
    else if (strncmp(csFileMagic, XN_DEVICE_FILE_MAGIC_V3, XN_DEVICE_FILE_MAGIC_LEN) == 0)
    {
        m_nFileVersion = 3;
    }
    else if (strncmp(csFileMagic, XN_DEVICE_FILE_MAGIC_V2, XN_DEVICE_FILE_MAGIC_LEN) == 0)
    {
        m_nFileVersion = 2;
    }
    else if (strncmp(csFileMagic, XN_DEVICE_FILE_MAGIC_V1, XN_DEVICE_FILE_MAGIC_LEN) == 0)
    {
        m_nFileVersion = 1;
    }
    else
    {
        XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_FILE_CORRUPTED, XN_MASK_FILE, "Invalid file magic!");
    }

    return (XN_STATUS_OK);
}
Example #20
0
XnStatus XnServerSession::HandleConfigFromINIFile()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnSensorServerMessageIniFile message;
	XnUInt32 nDataSize = sizeof(message);
	nRetVal = m_privateIncomingPacker.ReadCustomData(XN_SENSOR_SERVER_MESSAGE_INI_FILE, (XnUChar*)&message, &nDataSize);
	XN_IS_STATUS_OK(nRetVal);

	if (nDataSize != sizeof(message))
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_SENSOR_SERVER, "Sensor server protocol error - invalid size!");
	}

	// load
	XnStatus nActionResult = ConfigFromINIFileImpl(message.strFileName, message.strSectionName);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to config sensor from file '%s': %s", m_nID, message.strFileName, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GENERAL_OP_RESPOND, nActionResult);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #21
0
XnStatus XnFileDevice::ReadInitialState(XnPropertySet* pSet)
{
    XnStatus nRetVal = XN_STATUS_OK;

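    // file versions older than 4 are handled by the backwards-compatibility (BC) layer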
    if (m_nFileVersion < 4)
    {
        if (m_pBCData == NULL)
        {
            nRetVal = BCInit();
            XN_IS_STATUS_OK(nRetVal);
        }

        return BCReadInitialState(pSet);
    }

    // read an object from data packer
    XnPackedDataType nType;
    nRetVal = m_pDataPacker->ReadNextObject(&nType);
    XN_IS_STATUS_OK(nRetVal);

    if (nType != XN_PACKED_PROPERTY_SET)
    {
        XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_FILE_CORRUPTED, XN_MASK_DDK, "Stream does not start with a property set!");
    }

    nRetVal = m_pDataPacker->ReadPropertySet(pSet);
    XN_IS_STATUS_OK(nRetVal);

    return (XN_STATUS_OK);
}
Example #22
0
XnStatus XnServerSession::HandleGetGeneralProperty()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnUChar bufValue[XN_SENSOR_SERVER_MAX_REPLY_SIZE];
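	// the buffer holds the request header, immediately followed by room for the property data that will be sent back in the reply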
	XnSensorServerMessageGetPropertyRequest* pRequest = (XnSensorServerMessageGetPropertyRequest*)bufValue;
	XnUChar* pData = bufValue + sizeof(XnSensorServerMessageGetPropertyRequest);
	XnUInt32 nDataSize = XN_SENSOR_SERVER_MAX_REPLY_SIZE;
	nRetVal = m_privateIncomingPacker.ReadCustomData(XN_SENSOR_SERVER_MESSAGE_GET_GENERAL_PROPERTY, bufValue, &nDataSize);
	XN_IS_STATUS_OK(nRetVal);

	if (nDataSize < sizeof(XnSensorServerMessageGetPropertyRequest))
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_SENSOR_SERVER, "Sensor server protocol error - invalid size!");
	}

	// get
	XnGeneralBuffer gbValue = XnGeneralBufferPack(pData, pRequest->nSize);
	XnStatus nActionResult = GetGeneralPropertyImpl(pRequest->strModuleName, pRequest->strPropertyName, gbValue);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to get property '%s.%s': %s", m_nID, pRequest->strModuleName, pRequest->strPropertyName, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GET_GENERAL_PROPERTY, nActionResult, pRequest->nSize, pData);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #23
0
XnStatus XnSensorDepthStream::DecidePixelSizeFactor()
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	XnUInt32 nPixelSizeFactor;
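	// the firmware's reference resolution determines how the fixed zero-plane pixel size must be scaled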
	switch (GetFirmwareParams()->m_ReferenceResolution.GetValue())
	{
	case XN_RESOLUTION_SXGA:
		nPixelSizeFactor = 1;
		break;
	case XN_RESOLUTION_VGA:
		nPixelSizeFactor = 2;
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_DEVICE_SENSOR, "Can't resolve pixel size for reference resolution %llu", GetFirmwareParams()->m_ReferenceResolution.GetValue());
	}
	
	if (m_Helper.GetFirmwareVersion() < XN_SENSOR_FW_VER_3_0)
	{
		// due to some weird bug (we don't know the reason), DevKits older than 3.0 use
		// a smaller pixel size, but const shift remains the same. To work around this bug,
		// we will just update pixel size, instead of updating pixel size factor (so that const
		// shift will not be updated)
		nRetVal = ZeroPlanePixelSizeProperty().UnsafeUpdateValue(m_Helper.GetFixedParams()->GetZeroPlanePixelSize() * nPixelSizeFactor);
		XN_IS_STATUS_OK(nRetVal);
	}
	else
	{
		nRetVal = PixelSizeFactorProperty().UnsafeUpdateValue(nPixelSizeFactor);
		XN_IS_STATUS_OK(nRetVal);
	}
	
	return (XN_STATUS_OK);
}
Example #24
0
XnStatus XnDepthProcessor::Init()
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	// init base
	nRetVal = XnFrameStreamProcessor::Init();
	XN_IS_STATUS_OK(nRetVal);

	switch (GetStream()->GetOutputFormat())
	{
	case ONI_PIXEL_FORMAT_SHIFT_9_2:
		{
			// optimization: build an identity shift-to-shift LUT (see the comment above).
			m_pShiftToDepthTable = (OniDepthPixel*)xnOSMalloc(sizeof(OniDepthPixel)*XN_DEVICE_SENSOR_MAX_SHIFT_VALUE);
			XN_VALIDATE_ALLOC_PTR(m_pShiftToDepthTable);
			for (XnUInt32 i = 0; i < XN_DEVICE_SENSOR_MAX_SHIFT_VALUE; ++i)
			{
				m_pShiftToDepthTable[i] = (OniDepthPixel)i;
			}
			m_bShiftToDepthAllocated = TRUE;
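			// in shift output, 'no depth' is marked by 2047, the largest value that fits in 11 bits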
			m_noDepthValue = 2047;
		}
		break;
	case ONI_PIXEL_FORMAT_DEPTH_1_MM:
	case ONI_PIXEL_FORMAT_DEPTH_100_UM:
		m_noDepthValue = 0;
		break;
	default:
		XN_ASSERT(FALSE);
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_SENSOR_PROTOCOL_DEPTH, "Unknown Depth output: %d", GetStream()->GetOutputFormat());
	}

	return (XN_STATUS_OK);
}
Example #25
0
XnStatus XnSensorDepthStream::SetOutputFormat(OniPixelFormat nOutputFormat)
{
	XnStatus nRetVal = XN_STATUS_OK;

	switch (nOutputFormat)
	{
	case ONI_PIXEL_FORMAT_SHIFT_9_2:
	case ONI_PIXEL_FORMAT_DEPTH_1_MM:
		nRetVal = DeviceMaxDepthProperty().UnsafeUpdateValue(XN_DEVICE_SENSOR_MAX_DEPTH_1_MM);
		break;
	case ONI_PIXEL_FORMAT_DEPTH_100_UM:
		nRetVal = DeviceMaxDepthProperty().UnsafeUpdateValue(XN_DEVICE_SENSOR_MAX_DEPTH_100_UM);
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Unsupported depth output format: %d", nOutputFormat);
	}
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = m_Helper.BeforeSettingDataProcessorProperty();
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = XnDepthStream::SetOutputFormat(nOutputFormat);
	XN_IS_STATUS_OK(nRetVal);

	nRetVal = m_Helper.AfterSettingDataProcessorProperty();
	XN_IS_STATUS_OK(nRetVal);

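	// if the depth-utils handle (used for depth-to-image registration) is already open, refresh it with the new configuration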
	if (m_depthUtilsHandle != NULL)
	{	
		nRetVal = DepthUtilsSetDepthConfiguration(m_depthUtilsHandle, GetXRes(), GetYRes(), GetOutputFormat(), IsMirrored());
		XN_IS_STATUS_OK(nRetVal);
	}
		
	return (XN_STATUS_OK);
}
Example #26
0
XnStatus XnLinuxPosixEvent::Init()
{
	// Create a cond object
	if (0 != pthread_cond_init(&m_cond, NULL))
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_OS_EVENT_CREATION_FAILED, XN_MASK_OS, "Failed to create event: cond_init returned %d", errno);
	}

	// create the restriction mutex object
	if (0 != pthread_mutex_init(&m_mutex, NULL))
	{
		pthread_cond_destroy(&m_cond);
		XN_LOG_WARNING_RETURN(XN_STATUS_OS_EVENT_CREATION_FAILED, XN_MASK_OS, "Failed to create event: mutex_init returned %d", errno);
	}

	return XN_STATUS_OK;
}
Example #27
0
XnStatus XnFileDevice::CreateCodec(xn::ProductionNode& node)
{
    XnStatus nRetVal = XN_STATUS_OK;

    XnNodeInfo* pNodeInfo = NULL;
    if (m_nodeInfoMap.Get(node.GetName(), pNodeInfo) == XN_STATUS_OK)
    {
        XnUInt64 nValue;
        nRetVal = node.GetIntProperty(XN_STREAM_PROPERTY_COMPRESSION, nValue);
        XN_IS_STATUS_OK(nRetVal);

        // create new one
        XnCodecID codecID = XnCodec::GetCodecIDFromCompressionFormat((XnCompressionFormats)nValue);
        if (codecID == XN_CODEC_NULL)
        {
            XN_LOG_WARNING_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_FILE, "Invalid compression type: %llu", nValue);
        }

        if (pNodeInfo->pXnCodec == NULL || pNodeInfo->pXnCodec->GetCompressionFormat() != nValue)
        {
            // release old codec
            XN_DELETE(pNodeInfo->pXnCodec);
            if (pNodeInfo->codec.IsValid())
            {
                xnRemoveNeededNode(GetSelfNodeHandle(), pNodeInfo->codec);
                pNodeInfo->codec.Release();
            }

            // special case: IR recorded with JPEG. This mode is no longer allowed by OpenNI (JPEG
            // can now only be used for image). We'll have to handle it ourselves.
            if (node.GetInfo().GetDescription().Type == XN_NODE_TYPE_IR &&
                    codecID == XN_CODEC_JPEG)
            {
                xn::IRGenerator irGen(node);
                XnMapOutputMode outputMode;
                nRetVal = irGen.GetMapOutputMode(outputMode);
                XN_IS_STATUS_OK(nRetVal);

                XN_VALIDATE_NEW_AND_INIT(pNodeInfo->pXnCodec, XnJpegCodec, TRUE, outputMode.nXRes, outputMode.nYRes);
            }
            else
            {
                // normal case
                nRetVal = m_context.CreateCodec(codecID, node, pNodeInfo->codec);
                XN_IS_STATUS_OK(nRetVal);

                // we need to make the codec a needed node, so that if xnForceShutdown() is called, we will be
                // destroyed *before* it does (as we hold a reference to it).
                nRetVal = xnAddNeededNode(GetSelfNodeHandle(), pNodeInfo->codec);
                XN_IS_STATUS_OK(nRetVal);

                XN_VALIDATE_NEW(pNodeInfo->pXnCodec, XnNiCodec, pNodeInfo->codec);
            }
        }
    }

    return (XN_STATUS_OK);
}
Example #28
0
XnStatus XnProperty::GetValue(void* pValue) const
{
	if (m_pGetCallback == NULL)
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_PROPERTY_WRITE_ONLY, XN_MASK_DDK, "Property %s.%s is write only.", GetModule(), GetName());
	}

	return CallGetCallback(m_pGetCallback, pValue, m_pGetCallbackCookie);
}
Example #29
0
XnStatus xnConfigureSetOpcode(XnNodeHandle hNode, const TiXmlElement* pOpcode)
{
	XnStatus nRetVal = XN_STATUS_OK;

	const XnChar* strOpcode = pOpcode->Value();

	if (strcmp(strOpcode, "Mirror") == 0)
	{
		return xnConfigureMirror(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "MapOutputMode") == 0)
	{
		return xnConfigureMapOutputMode(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "WaveOutputMode") == 0)
	{
		return xnConfigureWaveOutputMode(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "Cropping") == 0)
	{
		return xnConfigureCropping(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "PixelFormat") == 0)
	{
		return xnConfigurePixelFormat(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "UserPosition") == 0)
	{
		return xnConfigureUserPosition(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "Property") == 0)
	{
		return xnConfigureProperty(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "FrameSync") == 0)
	{
		return xnConfigureFrameSync(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "AlternativeViewPoint") == 0)
	{
		return xnConfigureAlternativeViewPoint(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "RecorderDestination") == 0)
	{
		return xnConfigureRecorderDestination(hNode, pOpcode);
	}
	else if (strcmp(strOpcode, "AddNodeToRecording") == 0)
	{
		return xnConfigureAddNodeToRecording(hNode, pOpcode);
	}
	else
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_CORRUPT_FILE, XN_MASK_OPEN_NI, "Invalid configuration option: %s", strOpcode);
	}
	
	return (XN_STATUS_OK);
}
Example #30
0
XnStatus XnSensorImageStream::SetImageQuality(XnUInt32 /*nQuality*/)
{
	// check relevance
	if (m_InputFormat.GetValue() != XN_IO_IMAGE_FORMAT_JPEG)
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_UNSUPPORTED_PARAMETER, XN_MASK_DEVICE_SENSOR, "Image quality is only supported when input format is JPEG");
	}
	
	return (XN_STATUS_OK);
}