Code example #1
File: HelloNiTE2.cpp Project: seiketkm/nite2sample
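// Minimal NiTE2 user-tracking loop: initialize NiTE, create a UserTracker,
// then repeatedly read frames, render the depth image with user and skeleton
// overlays via OpenCV, and exit when 'q' or ESC is pressed.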
int main(int argc, char* argv[])
{
    try{
        auto status = nite::NiTE::initialize();
        nite::UserTracker userTracker;
        status = userTracker.create();
        if ( status != nite::STATUS_OK ) {
            throw std::runtime_error( "userTracker.create" );
        }
        
        cv::Mat depthImage;

        while ( 1 ) {
            nite::UserTrackerFrameRef userFrame;
            userTracker.readFrame( &userFrame );

            depthImage = depthToImage( userFrame );
            drawUser( userFrame, depthImage );
            drawSkeleton( userFrame, userTracker, depthImage);
            cv::imshow( "User", depthImage );

            int key = cv::waitKey( 10 );
            if ( key == 'q' || key == 0x1b ) {
                break;
            }
        }
    }
    catch(std::exception&){
        std::cout << openni::OpenNI::getExtendedError() << std::endl;
    }
}
Code example #2
File: Scene.cpp Project: aurelijusb/CG-models
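// Renders the scene: clears the color and depth buffers, optionally draws the
// rotation axis, then draws either the loaded models (scaled up) or the game
// geometry, plus the skeleton if enabled, and swaps the buffers.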
void Scene::draw() {
    GLfloat m[4][4];
    recalcModelView();
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    if (showAxis) {
        drawRotationAxis();
    }
    
    if (modelsMode) {        
        glScalef(6, 6, 6);        
        for (unsigned i=0; i < models.size(); i++) {
            models[i]->render();
        }
    } else {
        if (game) {
            drawObject(vertexes, exluded);
            drawExcluded();
        } else {
            drawObject(vertexes, -1);
        }

        if (skeleton) {
            drawSkeleton();
        }
    }
      
    glutSwapBuffers();
}
Code example #3
File: pedroG.cpp Project: nmusacco/Backup_Jumper
// All draw functions that get called in the render function
void drawGame_Textures(Game * game)
{
	
	drawBackground(game);
	drawSpike(game);
	glBindTexture(GL_TEXTURE_2D, 0);	
	drawWater();
	drawPlatform(game,5);
	drawMissile(game);
	drawSkeleton(game);
	// used to allow non textures objects to maintain their color
	glBindTexture(GL_TEXTURE_2D, 0);	
}
Code example #4
/**
  \brief Creates a QImage comprising the camera (color) image
**/
QImage QKinectWrapper::createCameraImage()
{

	// Mutex required here: the run() thread also accesses this data
	xn::DepthMetaData dmd;
	xn::ImageMetaData imd;
	g_DepthGenerator.GetMetaData(dmd);
	g_ImageGenerator.GetMetaData(imd);

	XnUInt16 g_nXRes = dmd.XRes();
	XnUInt16 g_nYRes = dmd.YRes();

	QImage image(g_nXRes,g_nYRes,QImage::Format_RGB32);


	const XnUInt8 *idata = imd.Data();
	for (unsigned nY=0; nY<g_nYRes; nY++)
	{
		uchar *imageptr = image.scanLine(nY);

		for (unsigned nX=0; nX < g_nXRes; nX++)
		{
			imageptr[0] = idata[2];
			imageptr[1] = idata[1];
			imageptr[2] = idata[0];
			imageptr[3] = 0xff;

			imageptr+=4;
			idata+=3;

		}
	}
	QPainter painter;
	painter.begin(&image);
	if(displayInfoImage)
	{
		painter.setPen(textPen);
		painter.setFont(font);
		drawInfo(&painter);
	}
	if(displaySkeletonImage)
	{
		painter.setPen(skeletonPen);
		drawSkeleton(&painter);
	}
	painter.end();

	return image;

}
Code example #5
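// Renders the skeleton editor: draws the skeleton at a fixed screen position,
// highlights the rectangles of the currently selected bones, and draws the GUI on top.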
void renderEditor(SDL_Renderer* renderer, EditorData* data)
{
	drawSkeleton(renderer, &data->view, { 400, 300 }, 0, &data->skeleton[0], data->skeleton.size());

	for (int i = 0; i < data->selectedBones.size(); ++i)
	{
		//SDL_RenderFillRect(renderer, (SDL_Rect*)&data->bones[data->selectedBones[i]].rect);

		fillRect(renderer, &data->view, data->bones[data->selectedBones[i]].rect);
	}


	GUI_drawGUI(data->gui, renderer);

}
Code example #6
File: display.c Project: M-Samoht/mocap-1
/* Called when GLUT wants to repaint the screen (we do all our rendering/geometry here) */
void display(void)
{
	float xCamera, yCamera, zCamera;	/* Camera coordinates */
	float xRoot, yRoot, zRoot;			/* Root position */
	
	/* Clear frame buffer and set up MODELVIEW matrix */
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);	
    glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();

	/* Calculate the camera position using polar coordinates */
	xCamera = rCamera*sin(thetaCamera)*cos(phiCamera);
	yCamera = rCamera*sin(thetaCamera)*sin(phiCamera);
	zCamera = rCamera*cos(thetaCamera);

	/* Calculate root position */
	xRoot = gMo->root_pos[currentFrame].x;
	yRoot = -gMo->root_pos[currentFrame].z;
	zRoot = gMo->root_pos[currentFrame].y;

	if(initialPose) {

		/* Place the camera and draw the skeleton in its initial position */
		gluLookAt(xCamera, yCamera, zCamera, 0, 0, 0, 0, 0, 1);
		drawInitialPose(gSkel, referenceFrame);

	} else {

		/* Place the camera at the specified position and draw the skeleton driven by the mocap data */
		gluLookAt(xCamera+xRoot, yCamera+yRoot, zCamera+zRoot, xRoot, yRoot, zRoot, 0, 0, 1);
		drawSkeleton(gSkel, gMo, currentFrame, referenceFrame);
	}


	drawFloor(140, 140);
	
	if(referenceFrame)
		drawReferenceFrame(20);
	
	/* Ensure any queued up OpenGL calls are run and swap buffers */
	glFlush();
	glutSwapBuffers();

}
Code example #7
File: Asset.cpp Project: Banderi/Antimony
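// Draws the asset's mesh after applying its scale and a -90 degree rotation about
// the X axis to the world matrix; in debug mode the skeleton is drawn as well.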
void Asset::draw(mat *mat_world, bool debug)
{
	if (m_geometry.position.size() < 1)
		return;

	mat s = MScalVector(m_scale);
	mat r = MRotAxis(float3(1, 0, 0), -MATH_PI / 2);
	mat w = s * r * *mat_world;

	Antimony::DrawMesh(&m_geometry, &w);

	if (debug)
	{
		drawSkeleton(mat_world, false);

		/*auto sh_temp = sh_current;
		Antimony::setShader(SHADERS_PLAIN);

		auto c = WorldToScreen(V3Transform(v3_origin, *world), &(mat_view * mat_proj), float2(Antimony::display.width, Antimony::display.height));
		Draw2DDot(float2(c.x, c.y), 4, COLOR_GREEN);

		auto vertices = m_geometry.position.size();
		for (unsigned int v = 0; v < vertices; v++)
		{
			auto p = WorldToScreen(XMVector3Transform(m_geometry.position.at(v), *world), &(mat_view * mat_proj), float2(Antimony::display.width, Antimony::display.height));
			if (p.z > 0)
			{
				Draw2DDot(float2(p.x, p.y), 2, COLOR_RED);

				auto cfade = 255 * v / vertices;
				auto color = RGBA2DWORD(cfade, 0, 0, 255);
				Antimony::Consolas.render(std::to_wstring(v).c_str(), 12, Antimony::display.width / 2 + p.x + 1, Antimony::display.height / 2 + p.y + 1, color, NULL);
			}
		}
		Antimony::setShader(sh_temp);*/
	}
}
Code example #8
File: MyWindow.cpp Project: ayonga/dart
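// Draws the simulated world: renders the first skeleton in a fixed color and,
// while an impulse is active, applies the external force to the soft body node
// and visualizes it as a 3D arrow, then defers to SimWindow::drawWorld().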
void MyWindow::drawWorld() const
{
  glEnable(GL_LIGHTING);
  glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
  Eigen::Vector4d color;
  color << 0.5, 0.8, 0.6, 1.0;
  drawSkeleton(mWorld->getSkeleton(0).get(), color, false);

  // draw arrow
  if (mImpulseDuration > 0)
  {
    dart::dynamics::SkeletonPtr Skeleton =
        static_cast<dart::dynamics::SkeletonPtr>(mWorld->getSkeleton(1));
    dart::dynamics::SoftBodyNode* softBodyNode = Skeleton->getSoftBodyNode(0);
    softBodyNode->addExtForce(mForceOnRigidBody);
    Eigen::Vector3d poa
        = softBodyNode->getTransform() * Eigen::Vector3d(0.0, 0.0, 0.0);
    Eigen::Vector3d start = poa - mForceOnRigidBody / 25.0;
    double len = mForceOnRigidBody.norm() / 25.0;
    dart::gui::drawArrow3D(start, mForceOnRigidBody, len, 0.025, 0.05);
  }

  SimWindow::drawWorld();
}
Code example #9
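// Waits on the Kinect frame events, draws the depth image and the skeleton,
// clears any pending cache entries, and updates the distance-based fall detector.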
int Read_Kinect::control_running()
{
	int rtn = 0;
	WaitForSingleObject(h1, INFINITE);
	//drawColor(h2);
	WaitForSingleObject(h3, INFINITE);
	drawDepth(h4);
	WaitForSingleObject(h5, INFINITE);
	//_depth.copyTo(_skeleton);
	//drawSkeleton(_skeleton);
	drawSkeleton();
	if (_remove_num > 0)
	{
		remove_cache();
	}

	fall_detection.falldetectiondistance(_avg_dis);
	_if_fall_down = fall_detection.get_fall_output_dis();

	//visualization_update();
	//imshow("Activity Recognition", _dispImg); 

	return rtn;
}
Code example #10
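// Polls the Kinect skeleton stream: fetches the next skeleton frame, applies the
// selected smoothing parameters, feeds the interaction stream when enabled,
// records tracked and position-only skeleton IDs, and optionally renders the
// skeletons into an OpenCV image that is exported as a UImage.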
bool UKinect::pollSkeleton() {
  if (NULL != sensor) {
    //
    // Attempt to get the skeleton frame
    hr = sensor->NuiSkeletonGetNextFrame(0, &skeletonFrame);
    if (FAILED(hr)) {
      cerr << "[UKinect] WARNING: Skeleton pool." << endl;
      return false;
    }
    //
    // smooth out the skeleton data
    if (skeletonFilter.as<int>() == 0) {
      //sensor->NuiTransformSmooth(&skeletonFrame, NULL);
    } else if (skeletonFilter.as<int>() == 1) {
      //const NUI_TRANSFORM_SMOOTH_PARAMETERS DefaultParams = {0.5f, 0.5f, 0.5f, 0.05f, 0.04f};
      sensor->NuiTransformSmooth(&skeletonFrame, NULL);
    } else if (skeletonFilter.as<int>() == 2) {
      const NUI_TRANSFORM_SMOOTH_PARAMETERS SomewhatLatentParams = { 0.5f, 0.1f, 0.5f, 0.1f, 0.1f };
      sensor->NuiTransformSmooth(&skeletonFrame, &SomewhatLatentParams);
    } else {
      const NUI_TRANSFORM_SMOOTH_PARAMETERS VerySmoothParams = { 0.7f, 0.3f, 1.0f, 1.0f, 1.0f };
      sensor->NuiTransformSmooth(&skeletonFrame, &VerySmoothParams);
    }
    //
    // process skeleton frame if interaction function enabled
    if (interaction) {
      Vector4 v;
      sensor->NuiAccelerometerGetCurrentReading(&v);
      hr = interactionStream->ProcessSkeleton(NUI_SKELETON_COUNT,
        skeletonFrame.SkeletonData,
        &v,
        skeletonFrame.liTimeStamp);
      if (FAILED(hr)) {
        cerr << "[UKinect] ERROR: Process skeleton failed (for interaction purpose)." << endl;
        return false;
      }
    }


    vector<int> skelIDs;

    // these are used in face tracking
    vector<int> skelTrackedIDs = UpdateTrackedSkeletons();   // << use this to set tracked

    for (int i = 0; i < NUI_SKELETON_COUNT; ++i) {
      NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
      if (NUI_SKELETON_POSITION_ONLY == trackingState) skelIDs.push_back((int)skeletonFrame.SkeletonData[i].dwTrackingID);
    }

    // Save vectors to UVars
    skeletonIDs = skelIDs;
    skeletonTrackedIDs = skelTrackedIDs;

    if (skeletonVisualization) {

      DWORD t_width, t_height;
      NuiImageResolutionToSize((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), t_width, t_height);

      if (color && skeletonVisualizationOnColor.as<int>())
        skeletonCVMat = colorCVMat.clone(); // use color image as a background if color function enabled
      else
        skeletonCVMat = Mat(Size(static_cast<int>(t_width), static_cast<int>(t_height)), CV_8UC3, CV_RGB(0, 0, 0));

      for (int i = 0; i < NUI_SKELETON_COUNT; ++i) {
        NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;

        if (NUI_SKELETON_TRACKED == trackingState) {
          // We're tracking the skeleton, draw it
          drawSkeleton(skeletonFrame.SkeletonData[i]);
          drawPosition(skeletonFrame.SkeletonData[i]);
        } else if (NUI_SKELETON_POSITION_ONLY == trackingState) {
          // we've only received the center point of the skeleton, draw that
          drawPosition(skeletonFrame.SkeletonData[i]);
        }

        drawOutOfFrame(skeletonFrame.SkeletonData[i]);
      }

      // Save CV image to UImage
      skeletonBin.image.width = skeletonCVMat.cols;
      skeletonBin.image.height = skeletonCVMat.rows;
      skeletonBin.image.size = skeletonCVMat.cols * skeletonCVMat.rows * 3;
      skeletonBin.image.data = skeletonCVMat.data;
      skeletonImage = skeletonBin;
    }

    return true;

  }
  cerr << "[UKinect] ERROR: Skeleton pool error." << endl;
  return false;
}
Code example #11
File: view.cpp Project: evanw/cs224final
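// Main paint routine: sets up the 3D camera and lights, then renders the mesh
// (through the normal/depth compositing shaders in sculpt mode when available),
// the ground plane, and the skeleton according to the current mode, plus
// optional tool debug overlays.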
void View::paintGL()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    camera3D();

    // Don't paint if we haven't gotten a resize yet
#ifdef USE_SHADER_MATERIALS
    if (normalDepthTexture.getWidth() * normalDepthTexture.getHeight() == 0)
        return;
#endif

    // position lights
    float position0[4] = { 0, 1, 0, 0 };
    float position1[4] = { 0, -1, 0, 0 };
    glLightfv(GL_LIGHT0, GL_POSITION, position0);
    glLightfv(GL_LIGHT1, GL_POSITION, position1);

    if (mode == MODE_SCULPT_MESH)
    {
#ifdef USE_SHADER_MATERIALS
        normalDepthTexture.startDrawingTo(depthTexture);
        normalDepthShader.use();
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        drawMesh(true);
        normalDepthShader.unuse();
        normalDepthTexture.stopDrawingTo();

        camera2D();
        glDepthFunc(GL_ALWAYS);
        normalDepthTexture.bind(0);
        depthTexture.bind(1);
        finalCompositeShaders[currentMaterial].use();
        finalCompositeShaders[currentMaterial].uniform("windowSize", width(), height());
        finalCompositeShaders[currentMaterial].texture("depthTexture", 1);
        drawFullscreenQuad();
        finalCompositeShaders[currentMaterial].unuse();
        depthTexture.unbind(1);
        normalDepthTexture.unbind(0);
        glDepthFunc(GL_LESS);
        camera3D();
#else
        drawMesh(true);
#endif
        drawGroundPlane();
    }
    else if (mode == MODE_VIEW_MESH || mode == MODE_ANIMATE_MESH)
    {
        drawMesh(false);
        drawGroundPlane();
        drawSkeleton(true);
    }
    else
    {
        drawSkeleton(false);
        drawGroundPlane();
    }

    if (drawToolDebug)
        foreach (Tool *tool, tools)
            tool->drawDebug(mouseX, mouseY);
}
Code example #12
/**
  \brief Creates a QImage comprising the depth map
**/
QImage QKinectWrapper::createDepthImage()
{
	// Mutex required here: the run() thread also accesses this data
	xn::SceneMetaData smd;
	xn::DepthMetaData dmd;
	g_DepthGenerator.GetMetaData(dmd);
	g_UserGenerator.GetUserPixels(0, smd);

	XnUInt16 g_nXRes = dmd.XRes();
	XnUInt16 g_nYRes = dmd.YRes();

	QImage image(g_nXRes,g_nYRes,QImage::Format_RGB32);


	const XnDepthPixel* pDepth = dmd.Data();
	const XnLabel* pLabels = smd.Data();

	// Compute stats
	/*unsigned max,min;
	max = pDepth[0];
	min = 0;
	for (unsigned i=0; i<g_nYRes*g_nXRes; i++)
	{
		if(pDepth[i]>max)
			max = pDepth[i];
		if(pDepth[i]!=0)
		{
			if(min==0)
				min = pDepth[i];
			else
				if(pDepth[i]<min)
					min = pDepth[i];
		}
	}
	printf("Depth min/max: %u %u\n",min,max);*/

	for (unsigned nY=0; nY<g_nYRes; nY++)
	{
		uchar *imageptr = image.scanLine(nY);

		for (unsigned nX=0; nX < g_nXRes; nX++)
		{
			unsigned depth = *pDepth;
			unsigned label = *pLabels;


			unsigned maxdist=10000;
			if(depth>maxdist) depth=maxdist;
			if(depth)
			{
				depth = (maxdist-depth)*255/maxdist+1;
			}
			// depth: 0: invalid
			// depth: 255: closest
			// depth: 1: furthest (maxdist distance)


			if(label)
			{
				imageptr[0] = BodyColors[label][0]*2*depth/255;
				imageptr[1] = BodyColors[label][1]*2*depth/255;
				imageptr[2] = BodyColors[label][2]*2*depth/255;
				imageptr[3] = 0xff;
			}
			else
			{
				// Here we could do depth*color, to show the colored depth
				imageptr[0] = depth;
				imageptr[1] = depth;
				imageptr[2] = depth;
				imageptr[3] = 0xff;
			}
			pDepth++;
			imageptr+=4;
			pLabels++;
		}
	}


	QPainter painter;
	painter.begin(&image);
	if(displayInfoDepth)
	{
		painter.setPen(textPen);
		painter.setFont(font);
		drawInfo(&painter);
	}
	if(displaySkeletonDepth)
	{
		painter.setPen(skeletonPen);
		drawSkeleton(&painter);
	}
	painter.end();
	return image;

}
Code example #13
File: Viewer.cpp Project: dingnigefei/healthcare
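// Per-frame display callback: reads the NiTE user frame and the top-view depth
// stream, builds the colored depth texture, runs background subtraction on the
// top view, draws the skeleton of the largest visible user into several views,
// handles the crossed-hands exit pose, and optionally dumps depth, joint and
// label data to disk.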
void SampleViewer::Display()
{
    if (g_pause)
        return;
    
	nite::UserTrackerFrameRef userTrackerFrame;
    nite::Status rc1 = m_pUserTracker->readFrame(&userTrackerFrame);
	if (rc1 != nite::STATUS_OK) {
		printf("GetNextData failed\n");
		return;
	}
    
	openni::VideoFrameRef depthFrameSide = userTrackerFrame.getDepthFrame();
    int height = depthFrameSide.getHeight();
    int width = depthFrameSide.getWidth();

    if (!label) {
    	label = (int *)malloc(width*height*sizeof(int));
    }
    
    openni::VideoFrameRef depthFrameTop;
    openni::Status rc2 = depthStreamTop.readFrame(&depthFrameTop);
	if (rc2 != openni::STATUS_OK) {
		printf("GetNextData failed\n");
		return;
	}

	if (m_pTexMap == NULL)
	{
		// Texture map init
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrameSide.getVideoMode().getResolutionX(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrameSide.getVideoMode().getResolutionY(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}

	const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	if (depthFrameSide.isValid() && g_drawDepth)
	{
		calculateHistogram(m_pDepthHistSide, MAX_DEPTH, depthFrameSide);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	float factor[3] = {1, 1, 1};
	// check if we need to draw depth frame to texture
	if (depthFrameSide.isValid() && g_drawDepth)
	{
		const nite::UserId* pLabels = userLabels.getPixels();

		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrameSide.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrameSide.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrameSide.getStrideInBytes() / sizeof(openni::DepthPixel);
        
		for (int y = 0; y < height; ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrameSide.getCropOriginX();

			for (int x = 0; x < width; ++x, ++pDepth, ++pTex, ++pLabels)
			{
				if (*pDepth != 0)
				{
					if (*pLabels == 0)
					{
						if (!g_drawBackground)
						{
							factor[0] = factor[1] = factor[2] = 0;

						}
						else
						{
							factor[0] = Colors[colorCount][0];
							factor[1] = Colors[colorCount][1];
							factor[2] = Colors[colorCount][2];
						}
					}
					else
					{
						factor[0] = Colors[*pLabels % colorCount][0];
						factor[1] = Colors[*pLabels % colorCount][1];
						factor[2] = Colors[*pLabels % colorCount][2];
					}
//					// Add debug lines - every 10cm
// 					else if ((*pDepth / 10) % 10 == 0)
// 					{
// 						factor[0] = factor[2] = 0;
// 					}

					int nHistValue = m_pDepthHistSide[*pDepth];
					pTex->r = nHistValue*factor[0];
					pTex->g = nHistValue*factor[1];
					pTex->b = nHistValue*factor[2];

					factor[0] = factor[1] = factor[2] = 1;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}
    
    const openni::DepthPixel *imgBufferSide = (const openni::DepthPixel *)depthFrameSide.getData();
    const openni::DepthPixel *imgBufferTop = (const openni::DepthPixel *)depthFrameTop.getData();
    calculateHistogram(m_pDepthHistTop, MAX_DEPTH, depthFrameTop);
    imgTop = Mat(depthFrameTop.getHeight(), depthFrameTop.getWidth(), CV_8UC3);
    Mat(depthFrameTop.getHeight(), depthFrameTop.getWidth(), CV_16U, (void *)imgBufferTop).convertTo(depthTop, CV_8U, 1.0/256);
    
    for (int i = 0; i < imgTop.rows; i++) {
        for (int j = 0; j < imgTop.cols; j++) {
            int val = (int)m_pDepthHistTop[imgBufferTop[j + i*imgTop.cols]];
            imgTop.at<Vec3b>(i, j).val[0] = val;
            imgTop.at<Vec3b>(i, j).val[1] = val;
            imgTop.at<Vec3b>(i, j).val[2] = val;
        }
    }
    
    if (g_getBackground)
        bgSubtractor->processImages(depthTop);
    bgSubtractor->getMask(depthTop, mask);
    imshow("Mask", mask);

	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);

	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS);

    // 320x240
	g_nXRes = depthFrameSide.getVideoMode().getResolutionX();
	g_nYRes = depthFrameSide.getVideoMode().getResolutionY();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();
	glDisable(GL_TEXTURE_2D);

	const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
    float maxSize = -1;
    int maxIdx = -1;
    
    for (int i = 0; i < users.getSize(); ++i) {
        const nite::UserData &user = users[i];
        
        if (!user.isVisible())
            continue;
        
        if (getSize(user) > maxSize) {
            maxSize = getSize(user);
            maxIdx = i;
        }
        //printf("user %d: size=%f\n, lost=%d, new=%d, visible=%d\n",
        //       i, getSize(user), user.isLost(), user.isNew(), user.isVisible());
    }
    
	for (int i = 0; i < users.getSize(); ++i)
	{
		const nite::UserData &user = users[i];

		updateUserState(user, userTrackerFrame.getTimestamp());
		if (user.isNew())
		{
			m_pUserTracker->startSkeletonTracking(user.getId());
			m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
		}
		else if (!user.isLost())
		{
			if (g_drawStatusLabel) {
				DrawStatusLabel(m_pUserTracker, user);
			}
            
            if (g_drawCenterOfMass) {
				DrawCenterOfMass(m_pUserTracker, user);
			}
            
			if (g_drawBoundingBox) {
				DrawBoundingBox(user);
			}

			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawSkeleton) {
                if (maxIdx == i) {
                    DrawSkeleton(m_pUserTracker, user);
                    sideSkel.setTo(Scalar(0, 0, 0));
                    drawSkeleton(sideSkel, sideJoints);
                    topSkel.setTo(Scalar(0, 0, 0));
                    drawSkeleton(topSkel, topJoints);
                    drawSkeleton(imgTop, topJoints);
                }
			}
		}

        // exit the program after a few seconds if PoseType == POSE_CROSSED_HANDS
		if (m_poseUser == 0 || m_poseUser == user.getId())
		{
			const nite::PoseData& pose = user.getPose(nite::POSE_CROSSED_HANDS);

			if (pose.isEntered())
			{
				// Start timer
				sprintf(g_generalMessage, "In exit pose. Keep it for %d second%s to exit\n", g_poseTimeoutToExit/1000, g_poseTimeoutToExit/1000 == 1 ? "" : "s");
				printf("Counting down %d second to exit\n", g_poseTimeoutToExit/1000);
				m_poseUser = user.getId();
				m_poseTime = userTrackerFrame.getTimestamp();
			}
			else if (pose.isExited())
			{
				memset(g_generalMessage, 0, sizeof(g_generalMessage));
				printf("Count-down interrupted\n");
				m_poseTime = 0;
				m_poseUser = 0;
			}
			else if (pose.isHeld())
			{
				// tick
				if (userTrackerFrame.getTimestamp() - m_poseTime > g_poseTimeoutToExit * 1000)
				{
					printf("Count down complete. Exit...\n");
					Finalize();
					exit(2);
				}
			}
		}
	}

	if (g_drawFrameId)
	{
		DrawFrameId(userTrackerFrame.getFrameIndex());
	}

	if (g_generalMessage[0] != '\0')
	{
		char *msg = g_generalMessage;
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(100, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, msg);
	}

    imshow("Side", sideSkel);
    imshow("Top", topSkel);
    imshow("DepthTop", imgTop);
    
    if (!g_getBackground) {
        knnsearch(topJoints, imgBufferTop, mask, labelTop, label, 320, 240);
        drawSkeleton(labelTop, topJoints);
        cv::resize(labelTop, labelTop, Size(), 2, 2);
        imshow("Label", labelTop);
        
        if (g_capture2) {
            // c style
            string path = outDir + "/depth-top" + to_string(nFrame) + ".txt";
            FILE *f = fopen(path.c_str(), "w");
            for (int i = 0; i < width*height; i++) {
                fprintf(f, "%u\n", imgBufferTop[i]);
            }
            fclose(f);
            
            path = outDir + "/depth-side" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            for (int i = 0; i < width*height; i++) {
                fprintf(f, "%u\n", imgBufferSide[i]);
            }
            fclose(f);
            
            path = outDir + "/joints-top" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            for (int i = 0; i < N_JOINTS; i++) {
                fprintf(f, "%f, %f, %f, %f, %f\n", topJoints[i][0], topJoints[i][1],
                        topJoints[i][2], topJoints[i][3], topJoints[i][4]);
            }
            fclose(f);
            
            path = outDir + "/joints-side" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            for (int i = 0; i < N_JOINTS; i++) {
                fprintf(f, "%f, %f, %f, %f, %f\n", sideJoints[i][0], sideJoints[i][1],
                        sideJoints[i][2], sideJoints[i][3], sideJoints[i][4]);
            }
            fclose(f);

            path = outDir + "/label-top" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            for (int i = 0; i < width*height; i++) {
                fprintf(f, "%d\n", label[i]);
            }
            fclose(f);
            
            path = outDir + "/label-side" + to_string(nFrame) + ".txt";
            f = fopen(path.c_str(), "w");
            const nite::UserId* labelsTop = userLabels.getPixels();
            for (int i = 0; i < width*height; i++) {
                fprintf(f, "%d\n", (int)labelsTop[i]);
            }
            fclose(f);
            
            nFrame++;
        }
    }
    
    // Swap the OpenGL display buffers
	glutSwapBuffers();
}
Code example #14
File: ski.cpp Project: jihyunlee/Flying-Penguin
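// Draws either the bare skeleton or the full creature, depending on the flag.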
void Ski::draw(bool show) {
    if(show) drawSkeleton();
    else     drawCreature();    
}