Example no. 1
			//----------
			void KinectV2::drawObject() {
				if (this->device) {
					switch (this->viewType.get()) { // don't break on the cases, flow through
					case 2:
						//this should be something like 'draw pretty mesh'
						//something seems to have been missed out of a merge in ofxKinectForWindows2
						this->device->drawWorld();
					case 1:
					{
						auto bodySource = this->device->getBodySource();
						if (bodySource) {
							bodySource->drawWorld();
						}
					}
					case 0:
					{
						ofPushStyle();
						ofSetColor(this->getColor());
						ofNoFill();
						ofSetLineWidth(1.0f);
						auto depthSource = this->device->getDepthSource();
						if (depthSource) {
							depthSource->drawFrustum();
						}
						auto colorSource = this->device->getColorSource();
						if (colorSource) {
							colorSource->drawFrustum();
						}
						ofPopStyle();
					}
					default:
						break;
					}
				}
			}
Example no. 2
///////////////////////////////////////////////////////////////////////////////
// draw bottom window (3rd person view)
///////////////////////////////////////////////////////////////////////////////
void ModelGL::drawSub2()
{
    // set bottom viewport
    setViewportSub(0, 0, windowWidth, windowHeight/2, 1, 100);

    // clear buffer
    glClearColor(bgColor[0], bgColor[1], bgColor[2], bgColor[3]);   // background color
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

    glPushMatrix();

    // First, transform the camera (viewing matrix) from world space to eye space
    glTranslatef(0, 0, -cameraDistance);
    glRotatef(cameraAngleX, 1, 0, 0); // pitch
    glRotatef(cameraAngleY, 0, 1, 0); // heading

    // draw grid
    drawGrid(10, 1);

    // draw balls
    drawSpheres();

    // draw the camera
    glPushMatrix();
    glTranslatef(0, 0, 7);
    drawCamera();
    drawFrustum(projectionLeft, projectionRight, projectionBottom, projectionTop, projectionNear, projectionFar);
    glPopMatrix();

    glPopMatrix();
}
Example no. 3
void LineRenderer::drawFrustum(const glm::mat4& view, const glm::mat4& proj, const glm::vec3& color)
{
    // Start from the eight corners of the clip-space cube; they are unprojected
    // to world space in place below.
    glm::vec3 frustumCornersWS[8] =
    {
        glm::vec3(-1.0f,  1.0f, -1.0f),
        glm::vec3( 1.0f,  1.0f, -1.0f),
        glm::vec3( 1.0f, -1.0f, -1.0f),
        glm::vec3(-1.0f, -1.0f, -1.0f),
        glm::vec3(-1.0f,  1.0f,  1.0f),
        glm::vec3( 1.0f,  1.0f,  1.0f),
        glm::vec3( 1.0f, -1.0f,  1.0f),
        glm::vec3(-1.0f, -1.0f,  1.0f)
    };

    glm::mat4 invViewProj = glm::inverse(proj * view);
    for (uint32_t i = 0; i < 8; ++i)
    {
        // Unproject and apply the perspective divide (avoids relying on GLM's
        // optional swizzle extension).
        glm::vec4 v = invViewProj * glm::vec4(frustumCornersWS[i], 1.0f);
        frustumCornersWS[i] = glm::vec3(v) / v.w;
    }

    drawFrustum(frustumCornersWS, color);
}
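The unprojected corners are then handed to a second drawFrustum overload that connects them with line segments. That overload is not shown in the source, so the following is only a minimal sketch; it assumes a hypothetical addLine(from, to, color) member on LineRenderer, and the edge indices follow the corner order of the array above (near plane first, then far plane).

void LineRenderer::drawFrustum(const glm::vec3 corners[8], const glm::vec3& color)
{
    // Edge index pairs: near rectangle, far rectangle, and the four connecting edges.
    static const uint32_t edges[12][2] = {
        {0, 1}, {1, 2}, {2, 3}, {3, 0},   // near plane
        {4, 5}, {5, 6}, {6, 7}, {7, 4},   // far plane
        {0, 4}, {1, 5}, {2, 6}, {3, 7}    // sides
    };

    for (const auto& e : edges)
        addLine(corners[e[0]], corners[e[1]], color);  // hypothetical line-submission helper
}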
Example no. 4
///////////////////////////////////////////////////////////////////////////////
// draw frustum
///////////////////////////////////////////////////////////////////////////////
void ModelGL::drawFrustum(float fovY, float aspectRatio, float nearPlane, float farPlane)
{
    float tangent = tanf(fovY/2 * DEG2RAD);
    float nearHeight = nearPlane * tangent;
    float nearWidth = nearHeight * aspectRatio;

    drawFrustum(-nearWidth, nearWidth, -nearHeight, nearHeight, nearPlane, farPlane);
}
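Examples no. 2 and no. 4 both delegate to a six-parameter overload taking the near-plane extents (left, right, bottom, top) plus the near and far distances. That overload is not shown in the source; the following is only a minimal sketch, assuming legacy immediate-mode OpenGL (consistent with the glTranslatef/glRotatef calls used elsewhere in ModelGL) and a camera looking down its local -Z axis. It draws the frustum as a wireframe: near rectangle, far rectangle, and the four connecting edges.

void ModelGL::drawFrustum(float l, float r, float b, float t, float n, float f)
{
    // Scale the near-plane extents out to the far plane (similar triangles).
    float ratio = f / n;
    float fl = l * ratio, fr = r * ratio;
    float fb = b * ratio, ft = t * ratio;

    // Near rectangle
    glBegin(GL_LINE_LOOP);
    glVertex3f(l, b, -n); glVertex3f(r, b, -n);
    glVertex3f(r, t, -n); glVertex3f(l, t, -n);
    glEnd();

    // Far rectangle
    glBegin(GL_LINE_LOOP);
    glVertex3f(fl, fb, -f); glVertex3f(fr, fb, -f);
    glVertex3f(fr, ft, -f); glVertex3f(fl, ft, -f);
    glEnd();

    // Four edges joining the near and far rectangles
    glBegin(GL_LINES);
    glVertex3f(l, b, -n); glVertex3f(fl, fb, -f);
    glVertex3f(r, b, -n); glVertex3f(fr, fb, -f);
    glVertex3f(r, t, -n); glVertex3f(fr, ft, -f);
    glVertex3f(l, t, -n); glVertex3f(fl, ft, -f);
    glEnd();
}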
Example no. 5
void ofApp::draw(){

	ofSetWindowTitle(ofToString(ofGetFrameRate(), 1) + "fps"); 
	ofSetColor(255);
	if (bDraw3D) {

		mesh = kinect.getDepth()->getMesh(pointCloudOpts);

		ofEnableDepthTest();

		camera.update();
		camera.begin();

		ofPushMatrix();
		ofScale(100, 100, 100);
		{
			drawFloor();
			if (bDrawSensorDebug) drawSensorPosition();

			ofPushMatrix();
			ofMultMatrix(kinect.getBodyFrame()->getFloorTransform());
			{ 
				if (bDrawMesh) drawMesh();
				drawBodies();
				if (bDrawSensorDebug) drawFrustum();
			}
			ofPopMatrix();
		}
		ofPopMatrix();

		camera.end();

		ofDisableDepthTest();
	}
	else {
		kinect.getColor()->draw(0, 0, ofGetWidth(), ofGetHeight());
		kinect.getBodyFrame()->drawProjected(0, 0, ofGetWidth(), ofGetHeight());
	}
	drawFaceFeatures();
	drawGestures();
}
Example no. 6
///////////////////////////////////////////////////////////////////////////////
// draw bottom window (3rd person view)
///////////////////////////////////////////////////////////////////////////////
void ModelGL::drawSub2()
{
    // set bottom viewport
    setViewportSub(0, 0, windowWidth, windowHeight/2, NEAR_PLANE, FAR_PLANE);

    // clear buffer
    glClearColor(bgColor[0], bgColor[1], bgColor[2], bgColor[3]);   // background color
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

    glPushMatrix();

    // First, transform the camera (viewing matrix) from world space to eye space
    glTranslatef(0, 0, -cameraDistance);
    glRotatef(cameraAngleX, 1, 0, 0); // pitch
    glRotatef(cameraAngleY, 0, 1, 0); // heading

    // draw grid
    drawGrid(10, 1);

    // draw a teapot
    glPushMatrix();
    glTranslatef(modelPosition[0], modelPosition[1], modelPosition[2]);
    glRotatef(modelAngle[0], 1, 0, 0);
    glRotatef(modelAngle[1], 0, 1, 0);
    glRotatef(modelAngle[2], 0, 0, 1);
    drawAxis(4);
    drawTeapot();
    glPopMatrix();

    // draw the camera
    glPushMatrix();
    glTranslatef(cameraPosition[0], cameraPosition[1], cameraPosition[2]);
    glRotatef(cameraAngle[0], 1, 0, 0);
    glRotatef(cameraAngle[1], 0, 1, 0);
    glRotatef(cameraAngle[2], 0, 0, 1);
    drawCamera();
    drawFrustum(FOV_Y, 1, 1, 10);
    glPopMatrix();

    glPopMatrix();
}
Example no. 7
void RenderLightProjection::render(const RenderInfo& info) const {

	// greebo: These four define the base area and are always needed to draw the light
	// Note the minus sign before intersectPlanes (the points have to be mirrored against the origin)
	Vector3 bottomUpRight = -Plane3::intersect(_frustum.left, _frustum.top, _frustum.back);
	Vector3 bottomDownRight = -Plane3::intersect(_frustum.left, _frustum.bottom, _frustum.back);
	Vector3 bottomUpLeft = -Plane3::intersect(_frustum.right, _frustum.top, _frustum.back);
	Vector3 bottomDownLeft = -Plane3::intersect(_frustum.right, _frustum.bottom, _frustum.back);

	// The planes of the frustum are measured at world 0,0,0 so we have to position the intersection points relative to the light origin
	bottomUpRight += _origin;
	bottomDownRight += _origin;
	bottomUpLeft += _origin;
	bottomDownLeft += _origin;

	if (_start != Vector3(0,0,0)) {
		// Calculate the vertices defining the top area
		// Again, note the minus sign
		Vector3 topUpRight = -Plane3::intersect(_frustum.left, _frustum.top, _frustum.front);
		Vector3 topDownRight = -Plane3::intersect(_frustum.left, _frustum.bottom, _frustum.front);
		Vector3 topUpLeft = -Plane3::intersect(_frustum.right, _frustum.top, _frustum.front);
		Vector3 topDownLeft = -Plane3::intersect(_frustum.right, _frustum.bottom, _frustum.front);

		topUpRight += _origin;
		topDownRight += _origin;
		topUpLeft += _origin;
		topDownLeft += _origin;

		Vector3 frustum[8] = { topUpRight, topDownRight, topDownLeft, topUpLeft,
							   bottomUpRight, bottomDownRight, bottomDownLeft, bottomUpLeft };
		drawFrustum(frustum);
	}
	else {
		// no light_start, just use the top vertex (doesn't need to be mirrored)
		Vector3 top = Plane3::intersect(_frustum.left, _frustum.right, _frustum.top);
		top += _origin;

		Vector3 pyramid[5] = { top, bottomUpRight, bottomDownRight, bottomDownLeft, bottomUpLeft };
		drawPyramid(pyramid);
	}
}
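Each corner above is obtained by intersecting three of the frustum planes. For reference, a minimal sketch of such a three-plane intersection follows; it is the standard closed-form (Cramer's rule) solution, assuming each plane is stored as a normal n and a distance d with the convention dot(n, x) = d, and it is not necessarily how Plane3::intersect is implemented in this codebase.

#include <glm/glm.hpp>

// Hypothetical three-plane intersection helper, assuming planes satisfy dot(n, x) = d.
// Returns the unique intersection point; degenerate (parallel) configurations are not handled.
glm::vec3 intersectThreePlanes(const glm::vec3& n1, float d1,
                               const glm::vec3& n2, float d2,
                               const glm::vec3& n3, float d3)
{
    // x = (d1 (n2 x n3) + d2 (n3 x n1) + d3 (n1 x n2)) / (n1 . (n2 x n3))
    glm::vec3 c23 = glm::cross(n2, n3);
    glm::vec3 c31 = glm::cross(n3, n1);
    glm::vec3 c12 = glm::cross(n1, n2);
    float denom = glm::dot(n1, c23);
    return (d1 * c23 + d2 * c31 + d3 * c12) / denom;
}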
Example no. 8
/**
 *	This is called when the control is repainted.
 */
/*afx_msg*/ void ChunkWatchControl::OnPaint()
{
	ScopedDogWatch dogWatchScope(s_chunkWatchPaint);

	// The extents of the control:
	CRect fullExtents;
	GetClientRect(fullExtents);

	// The drawing contexts:
	CPaintDC				paintDC(this);
	controls::MemDCScope	memDCScope(memDC_, paintDC, &fullExtents);

	memDC_.FillSolidRect(fullExtents, ::GetSysColor(COLOR_BTNFACE));

	// Calculate the drawing area:
	getDrawConstants();

	// Draw the details:
	if ((drawOptions_ & DRAW_PROJECTVIEW) != 0) drawProject   (memDC_);
	if ((drawOptions_ & DRAW_CHUNKS     ) != 0) drawChunks    (memDC_);
	if ((drawOptions_ & DRAW_UNLOADABLE ) != 0) drawUnloadable(memDC_);
	if ((drawOptions_ & DRAW_WORKING    ) != 0) drawWorking   (memDC_);
	if ((drawOptions_ & DRAW_GRID       ) != 0) drawGrid      (memDC_);
	if ((drawOptions_ & DRAW_USERPOS    ) != 0) drawArrow     (memDC_);
	if ((drawOptions_ & DRAW_FRUSTUM    ) != 0) drawFrustum   (memDC_);

	// Draw the outsides:
	memDC_.Draw3dRect
	(
		extents_, // around the tiles itself
		CLR_BORDER,
		CLR_BORDER
	);
	memDC_.Draw3dRect
	(
		fullExtents, 
		::GetSysColor(COLOR_3DHILIGHT),
		::GetSysColor(COLOR_3DSHADOW )
	);
}
Example no. 9
void OpenGLTransformation::drawSub2()
{
  glViewport(windowWidth / 2, 0, windowWidth / 2, windowHeight);
  glScissor(windowWidth / 2, 0, windowWidth / 2, windowHeight);

  glClearColor(0.f, 0.f, 0.f, 1);
  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

  MatStack.matrixMode(tgt::MatrixStack::PROJECTION);
  MatStack.pushMatrix();
  MatStack.loadMatrix(cameraRight->getProjectionMatrix(glm::ivec2(windowWidth * 0.5f, windowHeight)));
  MatStack.matrixMode(tgt::MatrixStack::MODELVIEW);
  MatStack.pushMatrix();
  MatStack.loadMatrix(cameraRight->getViewMatrix());

  // always draw the grid at the origin (before any modeling transform)
  drawGrid(10, 1);
  drawAxis(4);

  glm::mat4 cubeModel = glm::translate(glm::vec3(-0.5f));
  MatStack.pushMatrix();
  MatStack.loadMatrix(cameraRight->getViewMatrix() * cubeModel);
  drawCube();
  MatStack.popMatrix();

  // draw the camera
  glm::mat4 camModel = glm::translate(camera->getPosition()) * glm::inverse(camera->getRotateMatrix());
  MatStack.pushMatrix();
  MatStack.loadMatrix(cameraRight->getViewMatrix() * camModel);
  drawCamera();
  drawFrustum(camera->getFovy(), camera->getRatio(), camera->getNearDist(), camera->getFarDist());
  MatStack.popMatrix();

  MatStack.matrixMode(tgt::MatrixStack::PROJECTION);
  MatStack.popMatrix();
  MatStack.matrixMode(tgt::MatrixStack::MODELVIEW);
  MatStack.popMatrix();
}
Example no. 10
void pgDrawBeak() {
    drawFrustum(0.1, 0.1, 0.75);
}
Example no. 11
void LLFloaterExperiencePicker::draw()
{
	drawFrustum();
	LLFloater::draw();
}
Example no. 12
//--------------------------------------------------------------
void ofApp::draw(){
	if (path_est.size() > 0)
	{
		cam.begin();
		ofPushStyle();
		/// Render points as 3D cubes
		for (size_t i = 0; i < point_cloud_est.size(); ++i)
		{
			cv::Vec3d point = point_cloud_est[i];
			cv::Affine3d point_pose(cv::Mat::eye(3, 3, CV_64F), point);
			
			char buffer[50];
			sprintf(buffer, "%d", static_cast<int>(i));

			ofBoxPrimitive box;
			ofSetLineWidth(2.0f);
			ofSetColor(ofColor::blue);
			box.set(0.1, 0.1, -0.1);
			box.setPosition(point[0], point[1], point[2]);
			box.drawWireframe();	
		}
		ofPopStyle();
		cam.end();

		cv::Affine3d cam_pose = path_est[idx];
		
		cv::Matx44d mat44 = cam_pose.matrix;
		ofMatrix4x4 m44(mat44(0, 0), mat44(1, 0), mat44(2, 0), mat44(3, 0),
			mat44(0, 1), mat44(1, 1), mat44(2, 1), mat44(3, 1),
			mat44(0, 2), mat44(1, 2), mat44(2, 2), mat44(3, 2),
			mat44(0, 3), mat44(1, 3), mat44(2, 3), mat44(3, 3));

		if (camera_pov) {	
			cam.setPosition(m44.getTranslation());
			cam.lookAt(ofVec3f(0,0,1)*m44, ofVec3f(mat44(1,0),mat44(1,1), mat44(1,2)));
		}
		else
		{
			std::vector<ofPoint> path;
			for (int i = 0; i < path_est.size()-1; i++) {
				cv::Vec3d point = path_est[i].translation();
				path.push_back(ofPoint(point[0], point[1], point[2]));
			}
			ofPolyline trajectory(path);
			
			// render complete trajectory
			cam.begin();
			ofSetColor(ofColor::green);
			trajectory.draw();
			ofSetColor(ofColor::yellow);
			ofPushMatrix();
			ofMultMatrix(m44);
			ofDrawAxis(0.25);
			drawFrustum(f, cx, cy, 0.025, 0.4);
			ofPopMatrix();
			cam.end();
		}

		// update trajectory index (spring effect)
		forw *= (idx == n-1 || idx == 0) ? -1 : 1; idx += forw;
	}
}
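The call drawFrustum(f, cx, cy, 0.025, 0.4) above draws the camera's viewing volume directly from the pinhole intrinsics. The helper itself is not shown; the following is a hypothetical sketch, assuming f is the focal length in pixels, cx/cy is the principal point (roughly half the image size), and the camera looks down its local +z axis as in the trajectory rendering above.

// Hypothetical sketch (not necessarily this project's helper): draw a pinhole-camera
// frustum wireframe from intrinsics by back-projecting the image borders to two depths.
void drawFrustum(float f, float cx, float cy, float nearDist, float farDist) {
	float depths[2] = { nearDist, farDist };
	ofPoint corners[2][4];
	for (int p = 0; p < 2; ++p) {
		float hw = cx * depths[p] / f;  // half width of the image plane at this depth
		float hh = cy * depths[p] / f;  // half height
		corners[p][0] = ofPoint(-hw, -hh, depths[p]);
		corners[p][1] = ofPoint( hw, -hh, depths[p]);
		corners[p][2] = ofPoint( hw,  hh, depths[p]);
		corners[p][3] = ofPoint(-hw,  hh, depths[p]);
	}
	for (int i = 0; i < 4; ++i) {
		int j = (i + 1) % 4;
		ofDrawLine(corners[0][i], corners[0][j]);  // near rectangle
		ofDrawLine(corners[1][i], corners[1][j]);  // far rectangle
		ofDrawLine(corners[0][i], corners[1][i]);  // side edge
	}
}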
Example no. 13
////////////////////////////////////////////////////////////////////////////
// CHapticViewerView drawing
//
void CHapticViewerView::OnDraw(CDC* pDC)
{
    CHapticViewerDoc* pDoc = GetDocument();

    // Check pauseDraw to see if drawing should be paused to allow the user to
    // respond to an error message box.
    if (pDoc->m_pauseDraw) return;

    Mesh* pObj = pDoc->getObj();

    wglMakeCurrent(m_hDC,m_hRC);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    if (pObj)
    {
        static double s_hlElapsed = 0.0;
        static double s_glElapsed = 0.0;
        static double s_totalElapsed = 0.0;
        static int s_hlTriCount = 0;
        static int s_glTriCount = 0;
    
        int hlTriCount = 0;
        int glTriCount = 0;
 
        double hlElapsed = 0.0;
        double glElapsed = 0.0;
        double totalElapsed = 0.0;

        // Manage how often to update performance number to avoid smearing.
        const double showPerfNumSec = 0.25;
    
        LARGE_INTEGER startHapticli, startGraphicli, startTotalli, endli;
        QueryPerformanceCounter(&startTotalli);
        QueryPerformanceCounter(&startHapticli);

        // Single pass render for both haptics and graphics.
        m_bSinglePassRender = m_bShapeDepthBuffer && !m_bHapticCameraView;

        //
        // draw haptic scene
        //
        hlTriCount = drawSceneHL(pObj);
        
        QueryPerformanceCounter(&endli);
        hlElapsed = double(_int64(endli.QuadPart - 
                                  startHapticli.QuadPart)) / g_liFreq.QuadPart;
        
        //
        // draw graphic scene
        //
        QueryPerformanceCounter(&startGraphicli);
        if (!m_bSinglePassRender)
        {
            // Use render produced by drawSceneHL (single pass render).
            glPushAttrib(GL_POLYGON_BIT);
            {
                if (m_bWireframe)
                {
                    glPolygonMode(GL_FRONT, GL_LINE);
                }

                const bool isHapticView = false;
                glTriCount = drawSceneGL(pObj, isHapticView);
            }
            glPopAttrib();

        }

        drawCursorGL();

        // Draw the frustum (if desired).
        if (m_bHapticCameraVisual)
        {
            glPushMatrix();
            glMultMatrixd(pObj->transform);

            drawFrustum(m_hapticCameraFrustum);

            glPopMatrix();
        }

        glFinish();
        QueryPerformanceCounter(&endli);
        glElapsed = double(_int64(endli.QuadPart - 
                                  startGraphicli.QuadPart)) / g_liFreq.QuadPart;

        totalElapsed = double(_int64(endli.QuadPart - 
                                     startTotalli.QuadPart)) / g_liFreq.QuadPart;

        // QueryPerformanceCounter(&endli);
        double perfElapsed = double(_int64(endli.QuadPart - 
                                           g_drawPerfli.QuadPart)) / g_liFreq.QuadPart;
        if ((perfElapsed > showPerfNumSec) && 
            (hlTriCount > 0 || glTriCount > 0))
        {
            QueryPerformanceCounter(&g_drawPerfli);
            s_hlElapsed = hlElapsed;
            s_glElapsed = glElapsed;
            s_totalElapsed = totalElapsed;
            s_hlTriCount = hlTriCount;
            s_glTriCount = glTriCount;
        }
        
        if (m_bPerformance)
        {
            drawPerformance(s_hlElapsed, s_glElapsed, s_totalElapsed, 
                            s_hlTriCount, s_glTriCount);
        }
    

        Invalidate(FALSE);
    }

    SwapBuffers(m_hDC);
}
Example no. 14
	//----------
	void Device::drawWorld() {
		auto colorSource = this->getColorSource();
		auto depthSource = this->getDepthSource();
		auto bodySource = this->getBodySource();

		if (!depthSource) {
			ofLogError("ofxKinectForWindows2::Device::drawPrettyMesh") << "No depth source initialised";
			return;
		}
		
		//point cloud
		{
			//setup some point cloud properties for kicks
			bool usePointSize = true;

#if OF_VERSION_MAJOR > 0 || OF_VERSION_MINOR >= 10
			auto mainWindow = std::static_pointer_cast<ofAppGLFWWindow>(ofGetCurrentWindow());
			usePointSize = mainWindow ? mainWindow->getSettings().glVersionMajor <= 2 : false;
#endif

			usePointSize = false;

			if (usePointSize) {
				glPushAttrib(GL_POINT_BIT);
				glPointSize(5.0f);
				glEnable(GL_POINT_SMOOTH);
			}

			ofPushStyle();

			bool useColor = colorSource.get();
			if (useColor) {
				useColor &= colorSource->getTexture().isAllocated();
			}

			if (useColor) {
				//bind kinect color camera texture and draw mesh from depth (which has texture coordinates)
				colorSource->getTexture().bind();
			}

			auto opts = Source::Depth::PointCloudOptions(true, Source::Depth::PointCloudOptions::TextureCoordinates::ColorCamera);
			auto mesh = depthSource->getMesh(opts);

			//draw point cloud
			mesh.drawVertices();

			//draw triangles
			ofSetColor(255, 150);
			mesh.drawWireframe();

			//draw fills faded
			ofSetColor(255, 50);
			mesh.drawFaces();

			if (useColor) {
				//unbind colour camera
				colorSource->getTexture().unbind();
			}

			ofPopStyle();

			//clear the point cloud drawing attributes
			if (usePointSize) {
				glPopAttrib();
			}
		}
		
		//bodies and floor
		if (bodySource) {
			bodySource->drawWorld();

			ofPushMatrix();
			ofRotateDeg(90, 0, 0, 1);
			ofMultMatrix(bodySource->getFloorTransform());
			ofDrawGridPlane(5.0f);
			ofPopMatrix();
		}

		//draw the view cones of depth and colour cameras
		ofPushStyle();
		ofNoFill();
		ofSetLineWidth(2.0f);
		ofSetColor(100, 200, 100);
		depthSource->drawFrustum();
		if (colorSource) {
			ofSetColor(200, 100, 100);
			colorSource->drawFrustum();
		}
		ofPopStyle();
	}
Example no. 15
void pgDrawLeg() {
    drawFrustum(0.1, 0.1, 0.75);
}
Example no. 16
void AppStage_ComputeTrackerPoses::render()
{
    switch (m_menuState)
    {
    case eMenuState::inactive:
        break;
    case eMenuState::pendingControllerListRequest:
    case eMenuState::pendingControllerStartRequest:
    case eMenuState::pendingTrackerListRequest:
    case eMenuState::pendingTrackerStartRequest:
        break;
    case eMenuState::failedControllerListRequest:
    case eMenuState::failedControllerStartRequest:
    case eMenuState::failedTrackerListRequest:
    case eMenuState::failedTrackerStartRequest:
        break;
    case eMenuState::verifyHMD:
        {
            if (m_hmdView != nullptr)
            {
                PSMovePose pose = m_hmdView->getDisplayHmdPose();
                glm::quat orientation(pose.Orientation.w, pose.Orientation.x, pose.Orientation.y, pose.Orientation.z);
                glm::vec3 position(pose.Position.x, pose.Position.y, pose.Position.z);

                glm::mat4 rot = glm::mat4_cast(orientation);
                glm::mat4 trans = glm::translate(glm::mat4(1.0f), position);
                glm::mat4 transform = trans * rot;

                drawDK2Model(transform);
                drawTransformedAxes(transform, 10.f);
            }

            {
                PSMoveVolume volume;

                if (m_app->getOpenVRContext()->getHMDTrackingVolume(volume))
                {
                    drawTransformedVolume(glm::mat4(1.f), &volume, glm::vec3(0.f, 1.f, 1.f));
                }
            }
        } break;
    case eMenuState::verifyTrackers:
        {
            render_tracker_video();
        } break;
    case eMenuState::selectCalibrationType:
        break;
    case eMenuState::calibrateWithHMD:
        m_pCalibrateWithHMD->render();
        break;
    case eMenuState::calibrateWithMat:
        m_pCalibrateWithMat->render();
        break;
    case eMenuState::testTracking:
        {
            // Draw the origin axes
            drawTransformedAxes(glm::mat4(1.0f), 100.f);

            // Draw the HMD and tracking volume
            if (m_hmdView != nullptr)
            {
                // Compute a transform that goes from HMD tracking space to PSMove tracking space
                PSMovePose hmd_pose_at_origin = m_app->getOpenVRContext()->getHMDPoseAtPSMoveTrackingSpaceOrigin();
                glm::mat4 tracking_space_transform = psmove_pose_to_glm_mat4(hmd_pose_at_origin);
                glm::mat4 tracking_space_inv_transform = glm::inverse(tracking_space_transform);

                // Put the HMD transform in PSMove tracking space
                PSMovePose hmd_pose = m_hmdView->getDisplayHmdPose();
                glm::mat4 hmd_transform = tracking_space_inv_transform * psmove_pose_to_glm_mat4(hmd_pose);

                drawDK2Model(hmd_transform);
                drawTransformedAxes(hmd_transform, 10.f);

                PSMoveVolume volume;
                if (m_app->getOpenVRContext()->getHMDTrackingVolume(volume))
                {
                    drawTransformedVolume(tracking_space_inv_transform, &volume, glm::vec3(0.f, 1.f, 1.f));
                }
            }

            // Draw the frustum for each tracking camera
            for (t_tracker_state_map_iterator iter = m_trackerViews.begin(); iter != m_trackerViews.end(); ++iter)
            {
                const ClientTrackerView *trackerView = iter->second.trackerView;

                {
                    PSMoveFrustum frustum = trackerView->getTrackerFrustum();

                    drawFrustum(&frustum, k_psmove_frustum_color);
                }

                {
                    PSMovePose pose = trackerView->getTrackerPose();
                    glm::mat4 cameraTransform = psmove_pose_to_glm_mat4(pose);

                    drawTransformedAxes(cameraTransform, 20.f);
                }
            }

            // Draw the psmove model
            {
                PSMovePose pose = m_controllerView->GetPSMoveView().GetPose();
                glm::mat4 worldTransform = psmove_pose_to_glm_mat4(pose);

                drawPSMoveModel(worldTransform, glm::vec3(1.f, 1.f, 1.f));
                drawTransformedAxes(worldTransform, 10.f);
            }

        } break;
    case eMenuState::calibrateStepFailed:
        break;
    default:
        assert(0 && "unreachable");
    }
}
Example no. 17
void pgDrawHead() {
    drawFrustum(1.0, 1.2, 1.0);
}
Example no. 18
void pgDrawBody() {
    drawFrustum(1.0, 1.5, 2.0);
}
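Unlike the camera-frustum helpers above, the pgDraw* functions (Examples no. 10, 15, 17 and 18) use "frustum" in its solid-geometry sense: a truncated cone used as a body part. Their three-argument drawFrustum is not shown in the source; the sketch below is a guess that the parameters are base radius, top radius and height, and it uses the GLU quadric API under legacy OpenGL.

#include <GL/glu.h>

// Hypothetical sketch: draw a truncated cone (a cone frustum) along the local +z axis.
// The parameter meaning (base radius, top radius, height) is an assumption.
void drawFrustum(double baseRadius, double topRadius, double height) {
    GLUquadric* quad = gluNewQuadric();
    gluQuadricNormals(quad, GLU_SMOOTH);
    gluCylinder(quad, baseRadius, topRadius, height, 24, 1);  // side surface only
    gluDeleteQuadric(quad);
}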