XnStatus XnServerSensorInvoker::GetStreamMaxResolution(SensorInvokerStream* pStream, XnUInt32& nMaxNumPixels)
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	XnUInt64 nCount = 0;
	nRetVal = m_sensor.GetProperty(pStream->strType, XN_STREAM_PROPERTY_SUPPORT_MODES_COUNT, &nCount);
	XN_IS_STATUS_OK(nRetVal);

	XnCmosPreset* aPresets = XN_NEW_ARR(XnCmosPreset, (XnUInt32)nCount);
	XN_VALIDATE_ALLOC_PTR(aPresets);
	nRetVal = m_sensor.GetProperty(pStream->strType, XN_STREAM_PROPERTY_SUPPORT_MODES, XnGeneralBufferPack(aPresets, (XnUInt32)(nCount * sizeof(XnCmosPreset))));
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE_ARR(aPresets);
		return nRetVal;
	}

	XnUInt32 nMaxPixels = 0;
	for (XnUInt32 i = 0; i < nCount; ++i)
	{
		XnUInt32 nXRes;
		XnUInt32 nYRes;
		if (!XnDDKGetXYFromResolution((XnResolutions)aPresets[i].nResolution, &nXRes, &nYRes))
		{
			continue;
		}

		if (nXRes * nYRes > nMaxPixels)
		{
			nMaxPixels = nXRes * nYRes;
		}
	}

	XN_ASSERT(nMaxPixels > 0);

	XN_DELETE_ARR(aPresets);

	nMaxNumPixels = nMaxPixels;
	
	return (XN_STATUS_OK);
}
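A hedged usage sketch: a natural consumer of GetStreamMaxResolution is code that sizes a frame buffer large enough for any supported mode of the stream. The fragment below is illustrative only; nBytesPerPixel and nBufferSize are assumed names, not taken from the driver.

// Illustrative fragment (inside XnServerSensorInvoker) - not part of the original source.
XnUInt32 nMaxPixels = 0;
nRetVal = GetStreamMaxResolution(pStream, nMaxPixels);
XN_IS_STATUS_OK(nRetVal);

XnUInt32 nBytesPerPixel = sizeof(XnUInt16); // assumption: 16-bit depth pixels
XnUInt32 nBufferSize = nMaxPixels * nBytesPerPixel;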
Example #2
XnStatus XnPixelStream::OnResolutionChanged()
{
    XnStatus nRetVal = XN_STATUS_OK;

    XnResolutions res = (XnResolutions)m_Resolution.GetValue();
    if (res != XN_RESOLUTION_CUSTOM)
    {
        // update XRes and YRes accordingly
        XnUInt32 nXRes;
        XnUInt32 nYRes;
        if (!XnDDKGetXYFromResolution(res, &nXRes, &nYRes))
        {
            // unknown resolution enum - fail instead of updating with uninitialized values
            XN_ASSERT(FALSE);
            return XN_STATUS_ERROR;
        }

        nRetVal = m_XRes.UnsafeUpdateValue(nXRes);
        XN_IS_STATUS_OK(nRetVal);

        nRetVal = m_YRes.UnsafeUpdateValue(nYRes);
        XN_IS_STATUS_OK(nRetVal);
    }

    return (XN_STATUS_OK);
}
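For reference, XnDDKGetXYFromResolution maps an XnResolutions enum value to explicit dimensions. A minimal illustrative sketch of such a lookup is shown below, assuming only a few well-known resolution constants; it is not the actual DDK table.

// Simplified sketch of an enum-to-dimensions lookup; the real XnDDKGetXYFromResolution
// covers the full XnResolutions list.
static XnBool GetXYFromResolutionSketch(XnResolutions res, XnUInt32* pnXRes, XnUInt32* pnYRes)
{
    switch (res)
    {
    case XN_RESOLUTION_QVGA: *pnXRes = 320;  *pnYRes = 240;  return TRUE;
    case XN_RESOLUTION_VGA:  *pnXRes = 640;  *pnYRes = 480;  return TRUE;
    case XN_RESOLUTION_SXGA: *pnXRes = 1280; *pnYRes = 1024; return TRUE;
    default:                 return FALSE; // custom/unknown resolution
    }
}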
Example #3
XnStatus XnOniDevice::FillSupportedVideoModes()
{
	XnUInt32 nSupportedModes      = 0;
	XnCmosPreset* pSupportedModes = NULL;
	
	int s = 0;

	// Depth
	nSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.depthModes.GetSize();
	pSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.depthModes.GetData();

	OniPixelFormat depthFormats[] = { ONI_PIXEL_FORMAT_DEPTH_1_MM, ONI_PIXEL_FORMAT_DEPTH_100_UM };
	XnSizeT depthFormatsCount = sizeof(depthFormats) / sizeof(depthFormats[0]);

	m_sensors[s].sensorType             = ONI_SENSOR_DEPTH;
	// each input mode may produce one entry per supported depth pixel format,
	// so reserve room for all combinations (the duplicate check below only shrinks the count)
	m_sensors[s].pSupportedVideoModes   = XN_NEW_ARR(OniVideoMode, nSupportedModes * depthFormatsCount);
	XN_VALIDATE_ALLOC_PTR(m_sensors[s].pSupportedVideoModes);

	int writeIndex = 0;
	for(XnUInt32 i = 0; i < nSupportedModes; ++i)
	{
		for (XnSizeT formatIndex = 0; formatIndex < depthFormatsCount; ++formatIndex)
		{
			m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat = depthFormats[formatIndex];
			m_sensors[s].pSupportedVideoModes[writeIndex].fps = pSupportedModes[i].nFPS;
			XnBool bOK = XnDDKGetXYFromResolution(
				(XnResolutions)pSupportedModes[i].nResolution,
				(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX,
				(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY
				);
			XN_ASSERT(bOK);
			XN_REFERENCE_VARIABLE(bOK);

			bool foundMatch = false;
			for (int j = 0; j < writeIndex; ++j)
			{
				if (m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat == m_sensors[s].pSupportedVideoModes[j].pixelFormat &&
					m_sensors[s].pSupportedVideoModes[writeIndex].fps == m_sensors[s].pSupportedVideoModes[j].fps &&
					m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX == m_sensors[s].pSupportedVideoModes[j].resolutionX &&
					m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY == m_sensors[s].pSupportedVideoModes[j].resolutionY)
				{
					// Already know this configuration
					foundMatch = true;
					break;
				}
			}
			if (!foundMatch)
			{
				++writeIndex;
			}
		}
	}
	m_sensors[s].numSupportedVideoModes = writeIndex;

	// Image

	// first, make sure that our sensor actually supports Image
	XnUInt64 nImageSupported = FALSE;
	XnStatus nRetVal = m_sensor.GetProperty(XN_MASK_DEVICE, XN_MODULE_PROPERTY_IMAGE_SUPPORTED, &nImageSupported);
	XN_IS_STATUS_OK(nRetVal);
	if (nImageSupported)
	{
		++s;
		nSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.imageModes.GetSize();
		pSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.imageModes.GetData();

		m_sensors[s].sensorType             = ONI_SENSOR_COLOR;
		m_sensors[s].numSupportedVideoModes = 0; // to be changed later..
		m_sensors[s].pSupportedVideoModes   = XN_NEW_ARR(OniVideoMode, nSupportedModes * 10);
		XN_VALIDATE_ALLOC_PTR(m_sensors[s].pSupportedVideoModes);
		
		writeIndex = 0;
		for(XnUInt32 j=0; j < nSupportedModes; ++j)
		{
			// make an OniVideoMode for each OniFormat supported by the input format
			OniPixelFormat aOniFormats[10];
			int       nOniFormats = 0;
			XnOniColorStream::GetAllowedOniOutputFormatForInputFormat((XnIOImageFormats)pSupportedModes[j].nFormat, aOniFormats, &nOniFormats);
			for(int curOni=0; curOni<nOniFormats; ++curOni)
			{
				m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat = aOniFormats[curOni];
			
				m_sensors[s].pSupportedVideoModes[writeIndex].fps = pSupportedModes[j].nFPS;
				XnBool bOK = XnDDKGetXYFromResolution(
					(XnResolutions)pSupportedModes[j].nResolution,
					(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX,
					(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY
					);
				XN_ASSERT(bOK);
				XN_REFERENCE_VARIABLE(bOK);

				bool foundMatch = false;
				for (int i = 0; i < writeIndex; ++i)
				{
					if (m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat == m_sensors[s].pSupportedVideoModes[i].pixelFormat &&
						m_sensors[s].pSupportedVideoModes[writeIndex].fps == m_sensors[s].pSupportedVideoModes[i].fps &&
						m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX == m_sensors[s].pSupportedVideoModes[i].resolutionX &&
						m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY == m_sensors[s].pSupportedVideoModes[i].resolutionY)
					{
						// Already know this configuration
						foundMatch = true;
						break;
					}
				}
				if (!foundMatch)
				{
					++writeIndex;
				}
			}
		}
		m_sensors[s].numSupportedVideoModes = writeIndex;
	}

	// IR
	++s;
	nSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.irModes.GetSize();
	pSupportedModes = m_sensor.GetDevicePrivateData()->FWInfo.irModes.GetData();

	m_sensors[s].sensorType             = ONI_SENSOR_IR;
	m_sensors[s].pSupportedVideoModes   = XN_NEW_ARR(OniVideoMode, nSupportedModes);
	XN_VALIDATE_ALLOC_PTR(m_sensors[s].pSupportedVideoModes);
	
	writeIndex = 0;
	for(XnUInt32 i=0; i < nSupportedModes; ++i)
	{
		m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat = ONI_PIXEL_FORMAT_GRAY16;
		m_sensors[s].pSupportedVideoModes[writeIndex].fps = pSupportedModes[i].nFPS;
		XnBool bOK = XnDDKGetXYFromResolution(
			(XnResolutions)pSupportedModes[i].nResolution,
			(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX,
			(XnUInt32*)&m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY
			);
		XN_ASSERT(bOK);
		XN_REFERENCE_VARIABLE(bOK);

		bool foundMatch = false;
		for (int j = 0; j < writeIndex; ++j)
		{
			if (m_sensors[s].pSupportedVideoModes[writeIndex].pixelFormat == m_sensors[s].pSupportedVideoModes[j].pixelFormat &&
				m_sensors[s].pSupportedVideoModes[writeIndex].fps == m_sensors[s].pSupportedVideoModes[j].fps &&
				m_sensors[s].pSupportedVideoModes[writeIndex].resolutionX == m_sensors[s].pSupportedVideoModes[j].resolutionX &&
				m_sensors[s].pSupportedVideoModes[writeIndex].resolutionY == m_sensors[s].pSupportedVideoModes[j].resolutionY)
			{
				// Already know this configuration
				foundMatch = true;
				break;
			}
		}
		if (!foundMatch)
		{
			++writeIndex;
		}
	}
	m_sensors[s].numSupportedVideoModes = writeIndex;
	m_numSensors = s+1;

	return XN_STATUS_OK;
}
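The duplicate check is repeated verbatim for the depth, color and IR sections above. A hedged refactoring sketch, using only the OniVideoMode fields already compared in the original code; the helper name IsKnownVideoMode is an assumption, not part of the driver:

// Returns true if 'mode' already appears in the first nCount entries of aModes.
static bool IsKnownVideoMode(const OniVideoMode* aModes, int nCount, const OniVideoMode& mode)
{
	for (int i = 0; i < nCount; ++i)
	{
		if (aModes[i].pixelFormat == mode.pixelFormat &&
			aModes[i].fps == mode.fps &&
			aModes[i].resolutionX == mode.resolutionX &&
			aModes[i].resolutionY == mode.resolutionY)
		{
			return true;
		}
	}
	return false;
}

// Each fill loop could then write the candidate at aModes[writeIndex] and advance with:
//     if (!IsKnownVideoMode(aModes, writeIndex, aModes[writeIndex])) { ++writeIndex; }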