Example #1
XnUInt32 XnSensorImageStream::CalculateExpectedSize()
{
	XnUInt32 nExpectedImageBufferSize = GetXRes() * GetYRes();

	// when cropping is turned on, the actual image size is smaller
	const XnCropping* pCropping = GetCropping();
	if (pCropping->bEnabled)
	{
		nExpectedImageBufferSize = pCropping->nXSize * pCropping->nYSize;
	}

	switch (m_InputFormat.GetValue())
	{
	case XN_IO_IMAGE_FORMAT_YUV422:
	case XN_IO_IMAGE_FORMAT_UNCOMPRESSED_YUV422:
		// in YUV each pixel is represented in 2 bytes (actually 2 pixels are represented by 4 bytes)
		nExpectedImageBufferSize *= 2;
		break;
	case XN_IO_IMAGE_FORMAT_BAYER:
		// each pixel is one byte.
		break;
	case XN_IO_IMAGE_FORMAT_JPEG:
		// image should be in RGB now - 3 bytes a pixel
		nExpectedImageBufferSize *= 3;
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_DEVICE_SENSOR, "Don't know how to calculate expected size for input format %d", m_InputFormat.GetValue());
	}

	return nExpectedImageBufferSize;
}
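
The switch above encodes a simple bytes-per-pixel rule: YUV422 packs two pixels into four bytes (two bytes per pixel on average), Bayer data is one byte per pixel, and JPEG input is expected to have been decoded to 24-bit RGB by this point. A minimal standalone sketch of the same arithmetic (the enum and helper below are hypothetical, not part of the OpenNI API):

#include <cstdint>
#include <cstdio>

// Hypothetical stand-ins for the input formats handled above.
enum class PixelFormat { Yuv422, Bayer, Rgb24FromJpeg };

// Same bytes-per-pixel rule as CalculateExpectedSize(), written standalone.
static uint32_t ExpectedBufferSize(uint32_t width, uint32_t height, PixelFormat format)
{
	uint32_t pixels = width * height;
	switch (format)
	{
	case PixelFormat::Yuv422:        return pixels * 2; // 2 pixels packed into 4 bytes
	case PixelFormat::Bayer:         return pixels;     // 1 byte per pixel
	case PixelFormat::Rgb24FromJpeg: return pixels * 3; // decoded to 24-bit RGB
	}
	return 0;
}

int main()
{
	// Sanity check: VGA at YUV422 is 640 * 480 * 2 = 614,400 bytes.
	std::printf("%u\n", ExpectedBufferSize(640, 480, PixelFormat::Yuv422));
	return 0;
}
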
Example #2
OniStatus LinkOniMapStream::getProperty(int propertyId, void* data, int* pDataSize)
{
	XnStatus nRetVal = XN_STATUS_ERROR;

	switch(propertyId)
	{
		case ONI_STREAM_PROPERTY_VIDEO_MODE:
			EXACT_PROP_SIZE(*pDataSize, OniVideoMode);
			nRetVal = GetVideoMode((OniVideoMode*)data);
			XN_IS_STATUS_OK_RET(nRetVal, ONI_STATUS_ERROR);
			break;
		
		case ONI_STREAM_PROPERTY_MIRRORING:
			EXACT_PROP_SIZE(*pDataSize, OniBool);
			nRetVal = GetMirror((OniBool*)data);
			XN_IS_STATUS_OK_RET(nRetVal, ONI_STATUS_ERROR);
			break;
		
		case ONI_STREAM_PROPERTY_CROPPING:
			EXACT_PROP_SIZE(*pDataSize, OniCropping);
			nRetVal = GetCropping(*(OniCropping*)data);
			XN_IS_STATUS_OK_RET(nRetVal, ONI_STATUS_ERROR);
			break;

		case LINK_PROP_PIXEL_FORMAT:
			ENSURE_PROP_SIZE(*pDataSize, XnLinkPixelFormat);
			ASSIGN_PROP_VALUE_INT(data, *pDataSize, m_pInputStream->GetVideoMode().m_nPixelFormat);
			break;
			
		case LINK_PROP_COMPRESSION:
			ENSURE_PROP_SIZE(*pDataSize, XnLinkCompressionType);
			ASSIGN_PROP_VALUE_INT(data, *pDataSize, m_pInputStream->GetVideoMode().m_nCompression);
			break;

		case PS_PROPERTY_GAIN:
			{
				ENSURE_PROP_SIZE(*pDataSize, XnUInt16);
				XnUInt16 gain;
				nRetVal = m_pInputStream->GetGain(gain);
				XN_IS_STATUS_OK_RET(nRetVal, ONI_STATUS_ERROR);
				ASSIGN_PROP_VALUE_INT(data, *pDataSize, gain);
			}
			break;

		default:
		{
			return LinkOniStream::getProperty(propertyId, data, pDataSize);
		}
	}

	return ONI_STATUS_OK;
}
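
For context, a client reaches both the standard ONI stream properties and the Link-specific ones above through openni::VideoStream::getProperty(), which is routed to the driver-side getProperty() shown in this example. A minimal client-side sketch for the cropping property, assuming a valid, already-created stream:

#include <OpenNI.h>

// Minimal sketch, assuming 'stream' is a valid openni::VideoStream: read
// ONI_STREAM_PROPERTY_CROPPING through the public OpenNI2 API.
OniCropping QueryCropping(openni::VideoStream& stream)
{
	OniCropping cropping = { 0 };	// zero-initialized: enabled == 0 means "no cropping"
	int size = (int)sizeof(cropping);
	if (stream.getProperty(ONI_STREAM_PROPERTY_CROPPING, &cropping, &size) != openni::STATUS_OK)
	{
		cropping.enabled = 0;	// treat a failed query as cropping disabled
	}
	return cropping;
}
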
Example #3
XnStatus XnSensorIRStream::OnIsMirroredChanged()
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	// if cropping is on, we need to flip it
	OniCropping cropping = *GetCropping();
	if (cropping.enabled)
	{
		nRetVal = SetCropping(&cropping);
		XN_IS_STATUS_OK(nRetVal);
	}
	
	return (XN_STATUS_OK);
}
Example #4
XnStatus XnSensorIRStream::SetCroppingMode(XnCroppingMode mode)
{
	switch (mode)
	{
	case XN_CROPPING_MODE_NORMAL:
	case XN_CROPPING_MODE_INCREASED_FPS:
	case XN_CROPPING_MODE_SOFTWARE_ONLY:
		break;
	default:
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_BAD_PARAM, XN_MASK_DEVICE_SENSOR, "Bad cropping mode: %u", mode);
	}

	return SetCroppingImpl(GetCropping(), mode);
}
Example #5
XnStatus XnPixelStream::FixCropping()
{
    XnStatus nRetVal = XN_STATUS_OK;

    XnCropping cropping = *GetCropping();
    if (cropping.nXOffset > GetXRes() ||
            cropping.nYOffset > GetYRes() ||
            XnUInt32(cropping.nXOffset + cropping.nXSize) > GetXRes() ||
            XnUInt32(cropping.nYOffset + cropping.nYSize) > GetYRes())
    {
        // disable it
        cropping.bEnabled = FALSE;
        nRetVal = SetCropping(&cropping);
        XN_IS_STATUS_OK(nRetVal);
    }

    return (XN_STATUS_OK);
}
Example #6
XnStatus XnPixelStream::FixCropping()
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	OniCropping cropping = *GetCropping();
	if (cropping.originX > (int)GetXRes() || 
		cropping.originY > (int)GetYRes() ||
		(cropping.originX + cropping.width) > (int)GetXRes() ||
		(cropping.originY + cropping.height) > (int)GetYRes())
	{
		// disable it
		cropping.enabled = FALSE;
		nRetVal = SetCropping(&cropping);
		XN_IS_STATUS_OK(nRetVal);
	}
	
	return (XN_STATUS_OK);
}
Example #7
void XnPixelStream::NewDataAvailable(OniFrame* pFrame)
{
	// crop
	xnOSEnterCriticalSection(GetLock());
	OniCropping cropping = *GetCropping();
	xnOSLeaveCriticalSection(GetLock());

	if (cropping.enabled)
	{
		XnStatus nRetVal = CropImpl(pFrame, &cropping);
		if (nRetVal != XN_STATUS_OK)
		{
			xnLogWarning(XN_MASK_DDK, "Failed to crop! Frame will be dropped");
			return;
		}
	}

	XnFrameStream::NewDataAvailable(pFrame);
}
Example #8
void XnSensorIRGenerator::OnResChanged()
{
	// we calculate the size because the IR stream actually gives out a bigger buffer, but
	// we want the buffer we return to be with the right size.
	XnMapOutputMode outputMode;
	GetMapOutputMode(outputMode);

	XnUInt32 nPixels = outputMode.nXRes * outputMode.nYRes;

	XnCropping cropping;
	GetCropping(cropping);

	if (cropping.bEnabled)
	{
		nPixels = cropping.nXSize * cropping.nYSize;
	}

	m_nBufferSize = nPixels * sizeof(XnIRPixel);
}
Example #9
XnStatus XnPixelStream::ReadImpl(XnStreamData* pStreamOutput)
{
    XnStatus nRetVal = XN_STATUS_OK;

    // first read
    nRetVal = XnFrameStream::ReadImpl(pStreamOutput);
    XN_IS_STATUS_OK(nRetVal);

    // now crop
    xnOSEnterCriticalSection(GetLock());
    XnCropping cropping = *GetCropping();
    xnOSLeaveCriticalSection(GetLock());

    if (cropping.bEnabled)
    {
        nRetVal = CropImpl(pStreamOutput, &cropping);
        XN_IS_STATUS_OK(nRetVal);
    }

    return (XN_STATUS_OK);
}
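
ReadImpl() above and NewDataAvailable() in Example #7 both hand the actual work to CropImpl(), which is not shown in these snippets. Conceptually it copies the cropped window row by row into a tightly packed buffer, which is also why FixCropping() insists that the rectangle stays inside the full resolution. A hedged sketch of that row-by-row copy, with hypothetical types in place of XnStreamData:

#include <cstdint>
#include <cstring>
#include <vector>

// Hypothetical crop descriptor; the real code uses XnCropping / OniCropping.
struct CropRect { uint32_t x, y, width, height; };

// Copy the cropped window out of a packed source frame, one row at a time,
// into a tightly packed destination buffer.
static std::vector<uint8_t> CropFrame(const uint8_t* src, uint32_t srcWidth,
                                      uint32_t bytesPerPixel, const CropRect& crop)
{
	const uint32_t srcStride = srcWidth * bytesPerPixel;
	const uint32_t dstStride = crop.width * bytesPerPixel;
	std::vector<uint8_t> dst(dstStride * crop.height);
	for (uint32_t row = 0; row < crop.height; ++row)
	{
		const uint8_t* srcRow = src + (crop.y + row) * srcStride + crop.x * bytesPerPixel;
		std::memcpy(dst.data() + row * dstStride, srcRow, dstStride);
	}
	return dst;
}
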
Example #10
XnStatus XnPixelStream::Mirror(XnStreamData* pStreamOutput) const
{
    XnUInt32 nXRes = GetCropping()->bEnabled ? GetCropping()->nXSize : GetXRes();
    return XnFormatsMirrorPixelData(GetOutputFormat(), (XnUChar*)pStreamOutput->pData, pStreamOutput->nDataSize, nXRes);
}
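
Mirror() passes the cropped X resolution when cropping is enabled so that each stored line is reversed at its actual width; reversing at the full stream width would mix pixels from neighbouring rows. The real work happens in XnFormatsMirrorPixelData, which is not shown here; the sketch below is a generic per-row mirror under the same assumption, not that function's implementation:

#include <algorithm>
#include <cstdint>

// Reverse the pixels of every row in place. lineWidth must be the width at
// which rows are actually stored - the cropped width when cropping is enabled.
static void MirrorRows(uint8_t* data, uint32_t dataSize,
                       uint32_t lineWidth, uint32_t bytesPerPixel)
{
	if (lineWidth == 0 || bytesPerPixel == 0)
		return;

	const uint32_t stride = lineWidth * bytesPerPixel;
	for (uint32_t offset = 0; offset + stride <= dataSize; offset += stride)
	{
		uint8_t* row = data + offset;
		for (uint32_t left = 0, right = lineWidth - 1; left < right; ++left, --right)
		{
			// swap whole pixels, byte by byte
			for (uint32_t b = 0; b < bytesPerPixel; ++b)
				std::swap(row[left * bytesPerPixel + b], row[right * bytesPerPixel + b]);
		}
	}
}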