Example #1
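// Enumerates the available recorder nodes, creates the first one found, points it
// at the requested output file, and adds every active stream (device, depth, image,
// IR and audio) to the recording.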
bool captureOpenWriteDevice()
{
  XnStatus nRetVal = XN_STATUS_OK;
  NodeInfoList recordersList;
  
  nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_RECORDER, NULL, recordersList);
  START_CAPTURE_CHECK_RC(nRetVal, "Enumerate recorders");

  // take first
  NodeInfo chosen = *recordersList.Begin();
  nRetVal = g_Context.CreateProductionTree(chosen);
  START_CAPTURE_CHECK_RC(nRetVal, "Create recorder");

  g_Capture.pRecorder = new Recorder;
  nRetVal = chosen.GetInstance(*g_Capture.pRecorder);
  START_CAPTURE_CHECK_RC(nRetVal, "Get recorder instance");

  nRetVal = g_Capture.pRecorder->SetDestination(XN_RECORD_MEDIUM_FILE, g_Capture.csFileName);
  START_CAPTURE_CHECK_RC(nRetVal, "Set output file");

  if (getDevice() != NULL)
  {
	  nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getDevice(), XN_CODEC_UNCOMPRESSED);
	  START_CAPTURE_CHECK_RC(nRetVal, "add device node");
  }

  if (isDepthOn() && (g_Capture.DepthFormat != CODEC_DONT_CAPTURE))
  {
	  nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getDepthGenerator(), g_Capture.DepthFormat);
	  START_CAPTURE_CHECK_RC(nRetVal, "add depth node");
  }

  if (isImageOn() && (g_Capture.ImageFormat != CODEC_DONT_CAPTURE))
  {
	  nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getImageGenerator(), g_Capture.ImageFormat);
	  START_CAPTURE_CHECK_RC(nRetVal, "add image node");
  }

  if (isIROn() && (g_Capture.IRFormat != CODEC_DONT_CAPTURE))
  {
	  nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getIRGenerator(), g_Capture.IRFormat);
	  START_CAPTURE_CHECK_RC(nRetVal, "add IR stream");
  }

  if (isAudioOn() && (g_Capture.AudioFormat != CODEC_DONT_CAPTURE))
  {
	  nRetVal = g_Capture.pRecorder->AddNodeToRecording(*getAudioGenerator(), g_Capture.AudioFormat);
	  START_CAPTURE_CHECK_RC(nRetVal, "add Audio stream");
  }

  return true;
}
Example #2
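// Draws the current image (or IR) frame into the image texture. 16-bit grayscale
// frames are normalized to 8 bits, and when DEPTH_MASKED_IMAGE coloring is active,
// pixels without a corresponding depth reading are made transparent.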
void drawColorImage(IntRect* pLocation, IntPair* pPointer, int pointerRed, int pointerGreen, int pointerBlue)
{
	if (g_DrawConfig.Streams.bBackground)
		TextureMapDraw(&g_texBackground, pLocation);

	if (g_DrawConfig.Streams.Image.Coloring == IMAGE_OFF)
		return;

	if (!isImageOn() && !isIROn())
	{
		drawClosedStream(pLocation, "Image");
		return;
	}

	const MapMetaData* pImageMD = NULL;
	const XnUInt8* pImage = NULL;

	if (isImageOn())
	{
		pImageMD = getImageMetaData();
		pImage = getImageMetaData()->Data();
	}
	else if (isIROn())
	{
		pImageMD = getIRMetaData();
		pImage = (const XnUInt8*)getIRMetaData()->Data();
	}
	else
		return;

	if (pImageMD->FrameID() == 0)
	{
		return;
	}

	const DepthMetaData* pDepthMetaData = getDepthMetaData();

	double grayscale16Factor = 1.0;
	if (pImageMD->PixelFormat() == XN_PIXEL_FORMAT_GRAYSCALE_16_BIT)
	{
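		// Track the largest 16-bit value seen so far, so grayscale pixels can be
		// scaled into the displayable 0-255 range.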
		int nPixelsCount = pImageMD->XRes()*pImageMD->YRes();
		XnUInt16* pPixel = (XnUInt16*)pImage;
		for (int i = 0; i < nPixelsCount; ++i,++pPixel)
		{
			if (*pPixel > g_nMaxGrayscale16Value)
				g_nMaxGrayscale16Value = *pPixel;
		}

		if (g_nMaxGrayscale16Value > 0)
		{
			grayscale16Factor = 255.0 / g_nMaxGrayscale16Value;
		}
	}

	for (XnUInt16 nY = pImageMD->YOffset(); nY < pImageMD->YRes() + pImageMD->YOffset(); nY++)
	{
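		// Fill one texture row: YUV422 rows are converted in a single call, all
		// other pixel formats are converted pixel by pixel below.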
		XnUInt8* pTexture = TextureMapGetLine(&g_texImage, nY) + pImageMD->XOffset()*4;

		if (pImageMD->PixelFormat() == XN_PIXEL_FORMAT_YUV422)
		{
			YUV422ToRGB888(pImage, pTexture, pImageMD->XRes()*2, g_texImage.Size.X*g_texImage.nBytesPerPixel);
			pImage += pImageMD->XRes()*2;
		}
		else
		{
			for (XnUInt16 nX = 0; nX < pImageMD->XRes(); nX++, pTexture+=4)
			{
				XnInt32 nDepthIndex = 0;

				if (pDepthMetaData != NULL)
				{
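					// Find the depth pixel that corresponds to this image pixel, so that
					// DEPTH_MASKED_IMAGE coloring can hide pixels with no depth reading.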
					XnDouble dRealX = (nX + pImageMD->XOffset()) / (XnDouble)pImageMD->FullXRes();
					XnDouble dRealY = nY / (XnDouble)pImageMD->FullYRes();

					XnUInt32 nDepthX = dRealX * pDepthMetaData->FullXRes() - pDepthMetaData->XOffset();
					XnUInt32 nDepthY = dRealY * pDepthMetaData->FullYRes() - pDepthMetaData->YOffset();

					if (nDepthX >= pDepthMetaData->XRes() || nDepthY >= pDepthMetaData->YRes())
					{
						nDepthIndex = -1;
					}
					else
					{
						nDepthIndex = nDepthY*pDepthMetaData->XRes() + nDepthX;
					}
				}

				switch (pImageMD->PixelFormat())
				{
				case XN_PIXEL_FORMAT_RGB24:
					pTexture[0] = pImage[0];
					pTexture[1] = pImage[1];
					pTexture[2] = pImage[2];
					pImage+=3; 
					break;
				case XN_PIXEL_FORMAT_GRAYSCALE_8_BIT:
					pTexture[0] = pTexture[1] = pTexture[2] = *pImage;
					pImage+=1; 
					break;
				case XN_PIXEL_FORMAT_GRAYSCALE_16_BIT:
				{
					// braces scope the local declarations to this case
					XnUInt16* p16 = (XnUInt16*)pImage;
					XnUInt8 textureValue = (XnUInt8)((*p16) * grayscale16Factor);
					pTexture[0] = pTexture[1] = pTexture[2] = textureValue;
					pImage += 2;
					break;
				}
				}

				// decide if pixel should be lit or not
				if (g_DrawConfig.Streams.Image.Coloring == DEPTH_MASKED_IMAGE &&
					(pDepthMetaData == NULL || nDepthIndex == -1 || pDepthMetaData->Data()[nDepthIndex] == 0))
				{
					pTexture[3] = 0;
				}
				else
				{
					pTexture[3] = 255;
				}
			}
		}
	}

	if (pPointer != NULL)
	{
		TextureMapDrawCursor(&g_texImage, *pPointer, pointerRed, pointerGreen, pointerBlue);
	}

	TextureMapUpdate(&g_texImage);
	TextureMapDraw(&g_texImage, pLocation);
}
Example #3
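// Renders a single frame: computes the on-screen locations of the depth and image
// maps, initializes their textures, translates the mouse cursor into map
// coordinates, and then draws the streams, overlays and status text.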
void drawFrame()
{
	// calculate locations
	g_DrawConfig.DepthLocation.uBottom = 0;
	g_DrawConfig.DepthLocation.uTop = WIN_SIZE_Y - 1;
	g_DrawConfig.DepthLocation.uLeft = 0;
	g_DrawConfig.DepthLocation.uRight = WIN_SIZE_X - 1;

	g_DrawConfig.ImageLocation.uBottom = 0;
	g_DrawConfig.ImageLocation.uTop = WIN_SIZE_Y - 1;
	g_DrawConfig.ImageLocation.uLeft = 0;
	g_DrawConfig.ImageLocation.uRight = WIN_SIZE_X - 1;

	if (g_DrawConfig.Streams.ScreenArrangement == SIDE_BY_SIDE)
	{
		g_DrawConfig.DepthLocation.uTop = WIN_SIZE_Y / 2 - 1;
		g_DrawConfig.DepthLocation.uRight = WIN_SIZE_X / 2 - 1;
		g_DrawConfig.ImageLocation.uTop = WIN_SIZE_Y / 2 - 1;
		g_DrawConfig.ImageLocation.uLeft = WIN_SIZE_X / 2;
	}

	// Texture map init
	const DepthMetaData* pDepthMD = getDepthMetaData();
	if (isDepthOn())
	{
		g_nMaxDepth = getDepthGenerator()->GetDeviceMaxDepth();
		TextureMapInit(&g_texDepth, pDepthMD->FullXRes(), pDepthMD->FullYRes(), 4, pDepthMD->XRes(), pDepthMD->YRes());
		fixLocation(&g_DrawConfig.DepthLocation, pDepthMD->FullXRes(), pDepthMD->FullYRes());
	}

	const MapMetaData* pImageMD = NULL;

	if (isImageOn())
	{
		pImageMD = getImageMetaData();
	}
	else if (isIROn())
	{
		pImageMD = getIRMetaData();
	}

	if (pImageMD != NULL)
	{
		TextureMapInit(&g_texImage, pImageMD->FullXRes(), pImageMD->FullYRes(), 4, pImageMD->XRes(), pImageMD->YRes());
		fixLocation(&g_DrawConfig.ImageLocation, pImageMD->FullXRes(), pImageMD->FullYRes());
	}

	// check if pointer is over a map
	bool bOverDepth = (pDepthMD != NULL) && isPointInRect(g_DrawUserInput.Cursor, &g_DrawConfig.DepthLocation);
	bool bOverImage = (pImageMD != NULL) && isPointInRect(g_DrawUserInput.Cursor, &g_DrawConfig.ImageLocation);
	bool bDrawDepthPointer = false;
	bool bDrawImagePointer = false;
	int imagePointerRed = 255;
	int imagePointerGreen = 0;
	int imagePointerBlue = 0;

	IntPair pointerInDepth;
	IntPair pointerInImage;

	if (bOverImage)
	{
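		// Translate the cursor from window coordinates into image pixel coordinates.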
		pointerInImage.X = (double)(g_DrawUserInput.Cursor.X - g_DrawConfig.ImageLocation.uLeft) / (g_DrawConfig.ImageLocation.uRight - g_DrawConfig.ImageLocation.uLeft + 1) * pImageMD->FullXRes();
		pointerInImage.Y = (double)(g_DrawUserInput.Cursor.Y - g_DrawConfig.ImageLocation.uBottom) / (g_DrawConfig.ImageLocation.uTop - g_DrawConfig.ImageLocation.uBottom + 1) * pImageMD->FullYRes();
		bDrawImagePointer = true;
	}

	if (bOverDepth)
	{
		pointerInDepth.X = (double)(g_DrawUserInput.Cursor.X - g_DrawConfig.DepthLocation.uLeft) / (g_DrawConfig.DepthLocation.uRight - g_DrawConfig.DepthLocation.uLeft + 1) * pDepthMD->FullXRes();
		pointerInDepth.Y = (double)(g_DrawUserInput.Cursor.Y - g_DrawConfig.DepthLocation.uBottom) / (g_DrawConfig.DepthLocation.uTop - g_DrawConfig.DepthLocation.uBottom + 1) * pDepthMD->FullYRes();

		// make sure we're in cropped area
		if (pointerInDepth.X >= pDepthMD->XOffset() && pointerInDepth.X < (pDepthMD->XOffset() + pDepthMD->XRes()) &&
			pointerInDepth.Y >= pDepthMD->YOffset() && pointerInDepth.Y < (pDepthMD->YOffset() + pDepthMD->YRes()))
		{
			bDrawDepthPointer = true;
			if (!bOverImage && g_DrawConfig.bShowPointer)
			{
				// try to translate depth pixel to image
				if (getImageCoordinatesForDepthPixel(pointerInDepth.X, pointerInDepth.Y, pointerInImage.X, pointerInImage.Y))
				{
					bDrawImagePointer = true;
					imagePointerRed = 0;
					imagePointerGreen = 0;
					imagePointerBlue = 255;
				}
			}
		}
	}

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

	// Setup the opengl env for fixed location view
	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0,WIN_SIZE_X,WIN_SIZE_Y,0,-1.0,1.0);
	glDisable(GL_DEPTH_TEST); 

	if (g_DrawConfig.Streams.Depth.Coloring == CYCLIC_RAINBOW_HISTOGRAM || g_DrawConfig.Streams.Depth.Coloring == LINEAR_HISTOGRAM || g_DrawConfig.bShowPointer)
		calculateHistogram();

	drawColorImage(&g_DrawConfig.ImageLocation, bDrawImagePointer ? &pointerInImage : NULL, imagePointerRed, imagePointerGreen, imagePointerBlue);

	drawDepth(&g_DrawConfig.DepthLocation, bDrawDepthPointer ? &pointerInDepth : NULL);

	printStatisticsInfo();
	printRecordingInfo();

	if (g_DrawConfig.bShowPointer)
		drawPointerMode(bDrawDepthPointer ? &pointerInDepth : NULL);

	drawUserInput(!bOverDepth && !bOverImage);

	drawUserMessage();
	drawPlaybackSpeed();

	if (g_DrawConfig.strErrorState != NULL)
		drawErrorState();

	if (g_DrawConfig.bHelp)
		drawHelpScreen();

	glutSwapBuffers();
}