void XnUncompressedYUVtoRGBImageProcessor::ProcessFramePacketChunk(const XnSensorProtocolResponseHeader* pHeader, const XnUChar* pData, XnUInt32 nDataOffset, XnUInt32 nDataSize)
{
	XN_PROFILING_START_SECTION("XnUncompressedYUVtoRGBImageProcessor::ProcessFramePacketChunk")

	XnBuffer* pWriteBuffer = GetWriteBuffer();

	if (m_ContinuousBuffer.GetSize() != 0)
	{
		// fill up to a whole input element
		XnUInt32 nReadBytes = XN_MIN(nDataSize, XN_YUV_TO_RGB_INPUT_ELEMENT_SIZE - m_ContinuousBuffer.GetSize());
		m_ContinuousBuffer.UnsafeWrite(pData, nReadBytes);
		pData += nReadBytes;
		nDataSize -= nReadBytes;

		if (m_ContinuousBuffer.GetSize() == XN_YUV_TO_RGB_INPUT_ELEMENT_SIZE)
		{
			if (CheckWriteBufferForOverflow(XN_YUV_TO_RGB_OUTPUT_ELEMENT_SIZE))
			{
				// process it
				XnUInt32 nActualRead = 0;
				XnUInt32 nOutputSize = pWriteBuffer->GetFreeSpaceInBuffer();
				YUV422ToRGB888(m_ContinuousBuffer.GetData(), pWriteBuffer->GetUnsafeWritePointer(), XN_YUV_TO_RGB_INPUT_ELEMENT_SIZE, &nActualRead, &nOutputSize);
				pWriteBuffer->UnsafeUpdateSize(XN_YUV_TO_RGB_OUTPUT_ELEMENT_SIZE);
			}

			m_ContinuousBuffer.Reset();
		}
	}

	if (CheckWriteBufferForOverflow(nDataSize / XN_YUV_TO_RGB_INPUT_ELEMENT_SIZE * XN_YUV_TO_RGB_OUTPUT_ELEMENT_SIZE))
	{
		XnUInt32 nActualRead = 0;
		XnUInt32 nOutputSize = pWriteBuffer->GetFreeSpaceInBuffer();
		YUV422ToRGB888(pData, pWriteBuffer->GetUnsafeWritePointer(), nDataSize, &nActualRead, &nOutputSize);
		pWriteBuffer->UnsafeUpdateSize(nOutputSize);
		pData += nActualRead;
		nDataSize -= nActualRead;

		// if we have any bytes left, store them for the next packet.
		if (nDataSize > 0)
		{
			// no need to check for overflow: no more than XN_YUV_TO_RGB_INPUT_ELEMENT_SIZE
			// bytes can ever be left over here.
			m_ContinuousBuffer.UnsafeWrite(pData, nDataSize);
		}
	}

	XN_PROFILING_END_SECTION
}
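For context, YUV422 packs two pixels into four bytes (one luma sample per pixel plus shared chroma), so a fixed-size input element always expands into a fixed-size RGB element; that is why the processor above carries partial elements over in m_ContinuousBuffer between packets. Below is a minimal standalone sketch of converting one such macropixel, assuming U,Y1,V,Y2 byte order and BT.601-style integer coefficients; the actual YUV422ToRGB888 used by the sensor library may use a different byte layout or math.

#include <stdint.h>

// Clamp an intermediate value into the 0..255 byte range.
static inline uint8_t ClampToByte(int v)
{
	return (uint8_t)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

// Convert one 4-byte YUV422 macropixel (assumed U,Y1,V,Y2 order) into two
// 3-byte RGB pixels: 4 input bytes -> 6 output bytes.
static void YuvMacropixelToRgb(const uint8_t yuv[4], uint8_t rgb[6])
{
	int u  = yuv[0] - 128;
	int y1 = yuv[1];
	int v  = yuv[2] - 128;
	int y2 = yuv[3];

	// BT.601-style coefficients in 8.8 fixed point (divide by 256).
	int rOff = (359 * v) / 256;            // ~1.402 * V
	int gOff = (88 * u + 183 * v) / 256;   // ~0.344 * U + 0.714 * V
	int bOff = (454 * u) / 256;            // ~1.772 * U

	rgb[0] = ClampToByte(y1 + rOff);
	rgb[1] = ClampToByte(y1 - gOff);
	rgb[2] = ClampToByte(y1 + bOff);
	rgb[3] = ClampToByte(y2 + rOff);
	rgb[4] = ClampToByte(y2 - gOff);
	rgb[5] = ClampToByte(y2 + bOff);
}

Since 4 input bytes map to 6 output bytes, XN_YUV_TO_RGB_OUTPUT_ELEMENT_SIZE is presumably 1.5x XN_YUV_TO_RGB_INPUT_ELEMENT_SIZE, which is why the overflow check above scales the remaining data size by that ratio.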
Example #2
void drawColor(IntRect* pLocation, IntPair* pPointer, int pointerRed, int pointerGreen, int pointerBlue)
{
	if (g_DrawConfig.Streams.Color.Coloring == COLOR_OFF)
		return;

	if (!isColorOn() && !isIROn())
	{
		drawClosedStream(pLocation, "Color");
		return;
	}

	openni::VideoFrameRef colorMD;
	int depthWidth = 0, depthHeight = 0, depthFullWidth = 0, depthFullHeight = 0;
	int depthOriginX = 0, depthOriginY = 0;

	if (isColorOn())
	{
		colorMD = getColorFrame();
		if (!colorMD.isValid())
			return;
	}
	else if (isIROn())
	{
		colorMD = getIRFrame();
	}
	else
		return;

	if (!colorMD.isValid())
		return;

	if (colorMD.getFrameIndex() == 0)
	{
		return;
	}

	openni::VideoFrameRef depthMetaData = getDepthFrame();

	int width = colorMD.getWidth();
	int height = colorMD.getHeight();
	int fullWidth = colorMD.getVideoMode().getResolutionX();
	int fullHeight = colorMD.getVideoMode().getResolutionY();
	int originX = colorMD.getCropOriginX();
	int originY = colorMD.getCropOriginY();

	XnUInt8* pColor = (XnUInt8*)colorMD.getData();
	bool useDepth = false;
	openni::PixelFormat format = colorMD.getVideoMode().getPixelFormat();

	openni::DepthPixel* pDepth = NULL;

	if (depthMetaData.isValid())
	{
		useDepth = true;
		depthWidth = depthMetaData.getWidth();
		depthHeight = depthMetaData.getHeight();
		depthFullWidth = depthMetaData.getVideoMode().getResolutionX();
		depthFullHeight = depthMetaData.getVideoMode().getResolutionY();
		depthOriginX = depthMetaData.getCropOriginX();
		depthOriginY = depthMetaData.getCropOriginY();

		pDepth = (openni::DepthPixel*)depthMetaData.getData();
	}

	for (XnUInt16 nY = 0; nY < height; nY++)
	{
		XnUInt8* pTexture = TextureMapGetLine(&g_texColor, nY + originY) + originX*4;

		if (format == openni::PIXEL_FORMAT_YUV422)
		{
			YUV422ToRGB888(pColor, pTexture, width*2, g_texColor.Size.X*g_texColor.nBytesPerPixel);
			pColor += width*2;
		}
		else if (format == openni::PIXEL_FORMAT_YUYV)
		{
			YUYVToRGB888(pColor, pTexture, width*2, g_texColor.Size.X*g_texColor.nBytesPerPixel);
			pColor += width*2;
		}
		else
		{
			XnDouble dRealY = (nY + originY) / (XnDouble)fullHeight;
			XnInt32 nDepthY = dRealY * depthFullHeight - depthOriginY;

			for (XnUInt16 nX = 0; nX < width; nX++, pTexture+=4)
			{
				XnInt32 nDepthIndex = 0;

				if (useDepth)
				{
					XnDouble dRealX = (nX + originX) / (XnDouble)fullWidth;

					XnInt32 nDepthX = dRealX * depthFullWidth - depthOriginX;

					if (nDepthX >= depthWidth || nDepthY >= depthHeight || nDepthX < 0 || nDepthY < 0)
					{
						nDepthIndex = -1;
					}
					else
					{
						nDepthIndex = nDepthY*depthWidth + nDepthX;
					}
				}

				switch (format)
				{
				case openni::PIXEL_FORMAT_RGB888:
					pTexture[0] = pColor[0];
					pTexture[1] = pColor[1];
					pTexture[2] = pColor[2];
					pColor += 3;
					break;
				case openni::PIXEL_FORMAT_GRAY8:
					pTexture[0] = pTexture[1] = pTexture[2] = *pColor;
					pColor += 1;
					break;
				case openni::PIXEL_FORMAT_GRAY16:
					pTexture[0] = pTexture[1] = pTexture[2] = *((XnUInt16*)pColor) >> 2;
					pColor += 2;
					break;
				default:
					assert(0);
					return;
				}

				// decide if pixel should be lit or not
				if (g_DrawConfig.Streams.Color.Coloring == DEPTH_MASKED_COLOR &&
					(!depthMetaData.isValid() || nDepthIndex == -1 || pDepth[nDepthIndex] == 0))
				{
					pTexture[3] = 0;
				}
				else
				{
					pTexture[3] = 255;
				}
			}
		}
	}

	if (pPointer != NULL)
	{
		TextureMapDrawCursor(&g_texColor, *pPointer, pointerRed, pointerGreen, pointerBlue);
	}

	TextureMapUpdate(&g_texColor);
	TextureMapDraw(&g_texColor, pLocation);
}
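The inner loop above maps each color pixel back into the depth map by normalizing the (possibly cropped) color coordinate against the full color resolution and rescaling it into the depth frame's cropped coordinate space. The same mapping, restated as a standalone helper with hypothetical names:

// Map a pixel from the color frame's cropped coordinates to an index into the
// (possibly cropped) depth frame, or return -1 if it falls outside the depth crop.
static int MapColorPixelToDepthIndex(int colorX, int colorY,
                                     int colorOriginX, int colorOriginY,
                                     int colorFullWidth, int colorFullHeight,
                                     int depthOriginX, int depthOriginY,
                                     int depthFullWidth, int depthFullHeight,
                                     int depthWidth, int depthHeight)
{
	// Normalize the cropped color coordinate to the full field of view...
	double realX = (colorX + colorOriginX) / (double)colorFullWidth;
	double realY = (colorY + colorOriginY) / (double)colorFullHeight;

	// ...then express it in the depth frame's cropped coordinate system.
	int depthX = (int)(realX * depthFullWidth - depthOriginX);
	int depthY = (int)(realY * depthFullHeight - depthOriginY);

	if (depthX < 0 || depthY < 0 || depthX >= depthWidth || depthY >= depthHeight)
		return -1;

	return depthY * depthWidth + depthX;
}

A pixel that maps outside the depth crop gets index -1, which the DEPTH_MASKED_COLOR path above treats as "no depth" and renders fully transparent.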
Example #3
void drawColorImage(IntRect* pLocation, IntPair* pPointer, int pointerRed, int pointerGreen, int pointerBlue)
{
	if (g_DrawConfig.Streams.bBackground)
		TextureMapDraw(&g_texBackground, pLocation);

	if (g_DrawConfig.Streams.Image.Coloring == IMAGE_OFF)
		return;

	if (!isImageOn() && !isIROn())
	{
		drawClosedStream(pLocation, "Image");
		return;
	}

	const MapMetaData* pImageMD;
	const XnUInt8* pImage = NULL;

	if (isImageOn())
	{
		pImageMD = getImageMetaData();
		pImage = getImageMetaData()->Data();
	}
	else if (isIROn())
	{
		pImageMD = getIRMetaData();
		pImage = (const XnUInt8*)getIRMetaData()->Data();
	}
	else
		return;

	if (pImageMD->FrameID() == 0)
	{
		return;
	}

	const DepthMetaData* pDepthMetaData = getDepthMetaData();

	double grayscale16Factor = 1.0;
	if (pImageMD->PixelFormat() == XN_PIXEL_FORMAT_GRAYSCALE_16_BIT)
	{
		int nPixelsCount = pImageMD->XRes()*pImageMD->YRes();
		XnUInt16* pPixel = (XnUInt16*)pImage;
		for (int i = 0; i < nPixelsCount; ++i,++pPixel)
		{
			if (*pPixel > g_nMaxGrayscale16Value)
				g_nMaxGrayscale16Value = *pPixel;
		}

		if (g_nMaxGrayscale16Value > 0)
		{
			grayscale16Factor = 255.0 / g_nMaxGrayscale16Value;
		}
	}

	for (XnUInt16 nY = pImageMD->YOffset(); nY < pImageMD->YRes() + pImageMD->YOffset(); nY++)
	{
		XnUInt8* pTexture = TextureMapGetLine(&g_texImage, nY) + pImageMD->XOffset()*4;

		if (pImageMD->PixelFormat() == XN_PIXEL_FORMAT_YUV422)
		{
			YUV422ToRGB888(pImage, pTexture, pImageMD->XRes()*2, g_texImage.Size.X*g_texImage.nBytesPerPixel);
			pImage += pImageMD->XRes()*2;
		}
		else
		{
			for (XnUInt16 nX = 0; nX < pImageMD->XRes(); nX++, pTexture+=4)
			{
				XnInt32 nDepthIndex = 0;

				if (pDepthMetaData != NULL)
				{
					XnDouble dRealX = (nX + pImageMD->XOffset()) / (XnDouble)pImageMD->FullXRes();
					XnDouble dRealY = nY / (XnDouble)pImageMD->FullYRes();

					XnUInt32 nDepthX = dRealX * pDepthMetaData->FullXRes() - pDepthMetaData->XOffset();
					XnUInt32 nDepthY = dRealY * pDepthMetaData->FullYRes() - pDepthMetaData->YOffset();

					if (nDepthX >= pDepthMetaData->XRes() || nDepthY >= pDepthMetaData->YRes())
					{
						nDepthIndex = -1;
					}
					else
					{
						nDepthIndex = nDepthY*pDepthMetaData->XRes() + nDepthX;
					}
				}

				switch (pImageMD->PixelFormat())
				{
				case XN_PIXEL_FORMAT_RGB24:
					pTexture[0] = pImage[0];
					pTexture[1] = pImage[1];
					pTexture[2] = pImage[2];
					pImage+=3; 
					break;
				case XN_PIXEL_FORMAT_GRAYSCALE_8_BIT:
					pTexture[0] = pTexture[1] = pTexture[2] = *pImage;
					pImage+=1; 
					break;
				case XN_PIXEL_FORMAT_GRAYSCALE_16_BIT:
				{
					XnUInt16* p16 = (XnUInt16*)pImage;
					pTexture[0] = pTexture[1] = pTexture[2] = (XnUInt8)((*p16) * grayscale16Factor);
					pImage += 2;
					break;
				}
				}

				// decide if pixel should be lit or not
				if (g_DrawConfig.Streams.Image.Coloring == DEPTH_MASKED_IMAGE &&
					(pDepthMetaData == NULL || nDepthIndex == -1 || pDepthMetaData->Data()[nDepthIndex] == 0))
				{
					pTexture[3] = 0;
				}
				else
				{
					pTexture[3] = 255;
				}
			}
		}
	}

	if (pPointer != NULL)
	{
		TextureMapDrawCursor(&g_texImage, *pPointer, pointerRed, pointerGreen, pointerBlue);
	}

	TextureMapUpdate(&g_texImage);
	TextureMapDraw(&g_texImage, pLocation);
}
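The 16-bit grayscale path above auto-scales its display range: it keeps a running maximum across frames (g_nMaxGrayscale16Value) and scales samples so that this maximum maps to 255. A condensed sketch of the same idea, with illustrative names:

#include <stdint.h>

static uint16_t g_maxGray16 = 0; // running maximum, persists across frames

// Scale 16-bit grayscale samples into 8-bit values using the running maximum.
static void NormalizeGray16ToGray8(const uint16_t* pSrc, uint8_t* pDst, int pixelCount)
{
	// First pass: update the running maximum with this frame's samples.
	for (int i = 0; i < pixelCount; ++i)
	{
		if (pSrc[i] > g_maxGray16)
			g_maxGray16 = pSrc[i];
	}

	double factor = (g_maxGray16 > 0) ? 255.0 / g_maxGray16 : 1.0;

	// Second pass: scale each 16-bit sample into the 8-bit output.
	for (int i = 0; i < pixelCount; ++i)
		pDst[i] = (uint8_t)(pSrc[i] * factor);
}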