Example #1
Ray Renderer::RayThruPixel(int i, int j)
{
	Ray viewRay;

	Camera* camera = m_scene.GetCamera();

	viewRay.SetOrigin(camera->GetlookfromPoint());

	int d = camera->Getfov();
	int l, r, b, t;
	double u, v;

	l = -(m_scene.GetImageWidth() / 2);
	r = m_scene.GetImageWidth() / 2;
	b = -(m_scene.GetImageHeight() / 2);
	t = m_scene.GetImageHeight() / 2;

	u = l + (r - l) * (i + 0.5) / m_scene.GetImageWidth();
	v = b + (t - b) * (j + 0.5) / m_scene.GetImageHeight();

	Vector w(camera->GetlookatPoint().Inverse());
	Vector upVector(camera->GetupVector());
	Vector uVector = w * upVector;

	Vector directionVector;
	
	directionVector = (w * (-d)) + (uVector * u) + (upVector * v);

	viewRay.SetDirection(directionVector);

	return viewRay;
}
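A minimal usage sketch for RayThruPixel(); the surrounding RenderImage() member and the intersect-and-shade step are assumptions, not part of the snippet above:
void Renderer::RenderImage()
{
	for (int j = 0; j < m_scene.GetImageHeight(); ++j)
	{
		for (int i = 0; i < m_scene.GetImageWidth(); ++i)
		{
			Ray viewRay = RayThruPixel(i, j);
			// intersect viewRay against the scene and shade pixel (i, j) here (hypothetical)
		}
	}
}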
Example #2
MyGlWindow::MyGlWindow(int x, int y, int w, int h) :
  Fl_Gl_Window(x,y,w,h)
//==========================================================================
{
    
    mode( FL_RGB|FL_ALPHA|FL_DOUBLE | FL_STENCIL );
    
    fieldOfView = 45;

    MathVec3D<double> viewPoint( DEFAULT_VIEW_POINT );
    MathVec3D<double> viewCenter( DEFAULT_VIEW_CENTER );
    MathVec3D<double> upVector( DEFAULT_UP_VECTOR );
    double aspect = double(w) / double(h); // cast to avoid integer division
    m_viewer = new Viewer( viewPoint, viewCenter, upVector, 45, aspect );
    
    m_bvh = new BVH();
    m_particleList.resize(1000);
    for_each (m_particleList.begin(), m_particleList.end(), [] (Particle& p) {
        p.velocity  = Vec3f(0, 0, 0);
        p.color     = Vec3f(0.6, 0.6, 0);
        p.timeAlive = 0;
        p.lifespan  = 5;
    });
    m_range = 0;
    m_direction = 0;
}
Example #3
	//Set up the world, view, and projection transform matrices.
	void Graphics::SetupMatrices()
	{
		if(!CurrentCamera)
			return;  //Bail out here to stop the game from crashing when there is no camera.
							 // A missing camera is obviously a hint that something is horrendously wrong,
							 // but crashing over it is just annoying.

		//Set the position of the camera.
		float cameraX = CurrentCamera->transform->Position.x;
		float cameraY = CurrentCamera->transform->Position.y;
		float cameraH = -10.0f;

		//The eye point is the location of the viewer (center of the screen, 10 units away).
		Vec3 eyePoint( cameraX, cameraY, cameraH );
		//The look-at point is where the viewer is looking (center of the screen).
		Vec3 lookAtPoint( cameraX, cameraY, 0.0f );
		//The up vector defines which way is up (the y-direction).
		Vec3 upVector( 0.0f, 1.0f, 0.0f );
		//Create a left-handed view matrix.
		Mat4 matView;
		D3DXMatrixLookAtLH(&matView, &eyePoint, &lookAtPoint, &upVector);
		//Store the view matrix
		ViewMatrix = matView;
		//Create an orthogonal left-handed projection matrix.
		//This will transform everything to the view port with no perspective.
		//The near and far clipping planes are still needed, but less important.
		Mat4 matProj;
		D3DXMatrixOrthoLH(&matProj, SurfaceSize.x , SurfaceSize.y , 1.0f, 100.0f);
		//Store the projection matrix;
		ProjMatrix = matProj;

		//Store the view projection matrix
		ViewProjMatrix = ViewMatrix * ProjMatrix;
	}
Example #4
void QCamera::rotate( const QQuaternion& q )
{
    setUpVector(q * upVector());
    QVector3D viewVector = viewCenter() - position();
    QVector3D cameraToCenter = q * viewVector;
    setViewCenter(position() + cameraToCenter);
}
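A hedged usage sketch combining rotate() with the panRotation() and tiltRotation() helpers shown in Examples #21 and #26 below; the camera pointer is assumed:
// Pan 5 degrees about the camera's up vector, then tilt 2.5 degrees about its local x axis.
camera->rotate(camera->panRotation(5.0f));
camera->rotate(camera->tiltRotation(2.5f));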
Example #5
void RCViewableTransform::setFocalPoint(const Eks::Vector3D &aim)
  {
  Eks::Transform t = transform();

  transform = calculateTransform(t.translation(), aim, upVector());

  focalDistance = (aim - t.translation()).norm();
  transform = t;
  }
Example #6
void QCamera::translate( const QVector3D& vLocal, CameraTranslationOption option )
{
    QVector3D viewVector = viewCenter() - position(); // From "camera" position to view center

    // Calculate the amount to move by in world coordinates
    QVector3D vWorld;
    if ( !qFuzzyIsNull( vLocal.x() ) )
    {
        // Calculate the vector for the local x axis
        QVector3D x = QVector3D::crossProduct(viewVector, upVector()).normalized();
        vWorld += vLocal.x() * x;
    }

    if ( !qFuzzyIsNull( vLocal.y() ) )
        vWorld += vLocal.y() * upVector();

    if ( !qFuzzyIsNull( vLocal.z() ) )
        vWorld += vLocal.z() * viewVector.normalized();

    // Update the camera position using the calculated world vector
    setPosition(position() + vWorld);

    // Optionally update the view center coordinates as well
    if ( option == TranslateViewCenter )
        setViewCenter(viewCenter() + vWorld);

    // Refresh the camera -> view center vector
    viewVector = viewCenter() - position();

    // Calculate a new up vector. We do this by:
    // 1) Calculate a new local x-direction vector from the cross product of the new
    //    camera to view center vector and the old up vector.
    // 2) The local x vector is the normal to the plane in which the new up vector
    //    must lie. So we can take the cross product of this normal and the new
    //    view vector. The new up vector forms the last part of the orthonormal basis.
    QVector3D x = QVector3D::crossProduct(viewVector, upVector()).normalized();
    setUpVector(QVector3D::crossProduct(x, viewVector).normalized());
}
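A hedged usage sketch for translate(); DontTranslateViewCenter is assumed to be the counterpart of the TranslateViewCenter option used above:
// Strafe one unit along the camera's local x axis, keeping the view center fixed.
camera->translate(QVector3D(1.0f, 0.0f, 0.0f), QCamera::DontTranslateViewCenter);
// Dolly one unit toward the view center and drag the view center along with it.
camera->translate(QVector3D(0.0f, 0.0f, 1.0f), QCamera::TranslateViewCenter);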
Example #7
void RCViewableTransform::zoom(float factor, float, float)
  {
  Eks::Transform t = transform();

  float moveDist = focalDistance() * -0.5f * (factor - 1.0f);
  focalDistance = focalDistance() - moveDist;

  // flip axes because input x and y are in a top left coordinate system
  Eks::Vector3D look = upVector().cross(Eks::Vector3D(1.0f, 0.0f, 0.0f)) * moveDist;

  t.translate(look);

  transform = t;
  }
Example #8
void FPSControls::applyTranslation( float dx, float dy, float dz,
    PerspectiveCamera& camera )
{
    Vector3f eye = camera.eye();
    Vector3f x = camera.right();
    Vector3f y = camera.up();
    Vector3f z = -( camera.forward() );

    // project the y axis onto the ground plane
    //Vector3f zp = m_worldToGroundPlane * z;
    //zp[ 1 ] = 0;
    //zp = m_groundPlaneToWorld * zp;
    //zp.normalize();

    eye = eye + dx * x + dy * upVector() + dz * z;
    camera.setLookAt( eye, eye - z, y );
}
Example #9
void QD3D11MultiViewportViewer::translate( float dx, float dy, float dz )
{
	Vector3f eye = m_perspectiveCamera.eye();
	Vector3f x = m_perspectiveCamera.right();
	Vector3f y = m_perspectiveCamera.up();
	Vector3f z = m_perspectiveCamera.forward();

	// project the y axis onto the ground plane
	//Vector3f zp = m_worldToGroundPlane * z;
	//zp[ 1 ] = 0;
	//zp = m_groundPlaneToWorld * zp;
	//zp.normalize();

	// TODO: switch GLCamera over to have just a forward vector?
	// center is kinda stupid
	eye = eye + dx * x + dy * upVector() + dz * z;
	m_perspectiveCamera.setLookAt( eye, eye + z, y );
}
Example #10
void RCViewableTransform::track(float x, float y)
  {
  Eks::Transform t = transform();

  float xScale;
  float yScale;
  approximatePixelSizeAtDistance(focalDistance(), xScale, yScale);

  // flip axes because input x and y are in a top left coordinate system
  Eks::Vector3D across = Eks::Vector3D(1.0f, 0.0f, 0.0f) * xScale;
  Eks::Vector3D up = upVector() * yScale;

  x *= -1.0f;

  t.translate(x * across + y * up);

  transform = t;
  }
Example #11
void OGLRenderer::calcLightViewMatrix(void){
	//Calculate ViewMatrix
	glm::vec3 light_position = light[0].getPosition();
	glm::vec3 light_direction = light[0].getDirection();
	glm::vec3 upVector(0.0,1.0,0.0);
	glm::vec3 sVector;
	sVector = glm::normalize(glm::cross(light_direction, upVector));
	upVector = glm::normalize(glm::cross(sVector, light_direction));
	
	lightViewMatrix = glm::mat4(
		glm::vec4(sVector,glm::dot(-light_position,sVector)),
		glm::vec4(upVector,glm::dot(-light_position,upVector)),
		glm::vec4(-light_direction, glm::dot(light_direction,light_position)),
		glm::vec4(0.0,0.0,0.0,1.0)
	);
	lightViewMatrix = glm::transpose(lightViewMatrix);
	lightProjectionMatrix = glm::perspective(fov, (float)windowWidth/(float)windowHeight, 13.0f, 18.0f);
	// lightProjectionMatrix = glm::ortho(-5.0f, 5.0f, -5.0f, 5.0f, 13.0f, 18.0f);
}
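For comparison, a hedged equivalent of the hand-built view matrix above using GLM's look-at helper; it matches the construction above when light_direction is unit length:
glm::mat4 lookAtEquivalent = glm::lookAt(light_position,
                                         light_position + light_direction,
                                         glm::vec3(0.0f, 1.0f, 0.0f));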
Example #12
MMatrix retargetLocator::getAimMatrix( MMatrix inputAimMatrix )
{
	MVector aimVector( inputAimMatrix(3,0), inputAimMatrix(3,1), inputAimMatrix(3,2) );
	aimVector -= discOffset;

	MVector upVector( -aimVector.y - aimVector.z, aimVector.x, aimVector.x );
	MVector otherVector = aimVector^upVector;
	upVector = otherVector^aimVector;

	MVector normalizeAim = aimVector.normal();
	upVector.normalize();
	otherVector.normalize();

	aimVector += discOffset;
	double buildMatrix[4][4] = { normalizeAim.x, normalizeAim.y, normalizeAim.z, 0,
	                             upVector.x,  upVector.y,  upVector.z,  0,
	                             otherVector.x, otherVector.y, otherVector.z, 0,
								 aimVector.x, aimVector.y, aimVector.z, 1 };
	return MMatrix( buildMatrix );
}
Example #13
void NzPatch::ComputeSlope()
{
    float slope[25];
    NzVector3f upVector(0.f,1.f,0.f);

    float maxSlope = -10000.f;
    float minSlope = 10000.f;

    for(unsigned int j(0) ; j < 5 ; ++j)
        for(unsigned int i(0) ; i < 5 ; ++i)
        {
            slope[i+5*j] = m_vertexNormals.at(i+5*j).DotProduct(upVector);
            minSlope = std::min(std::fabs(slope[i+5*j]),minSlope);
            maxSlope = std::max(std::fabs(slope[i+5*j]),maxSlope);
        }

    //Compute the absolute contrast between the steepest and the shallowest slope
    m_slope = (maxSlope - minSlope)/(maxSlope + minSlope);
    float inv_sensitivity = 2;
    m_slope = std::pow(m_slope,inv_sensitivity);
}
Example #14
void RCViewableTransform::rotateAboutPoint(const Eks::Vector3D &point, float x, float y)
  {
  if(_rotateEnabled)
    {
    Eks::Transform t = transform();

    // old translation vector
    float length = (t.translation() - point).norm();

    Eigen::AngleAxisf xRot(x * -0.005f, upVector());
    t.prerotate(xRot);

    Eigen::AngleAxisf yRot(y * -0.005f, Eks::Vector3D(1.0f, 0.0f, 0.0f));
    t.rotate(yRot);


    Eks::Vector3D newLook = t.matrix().col(2).head<3>();
    t.translation() = point + (newLook * length);

    transform = t;
    }
  }
Example #15
void QGLView::setupCylinder(GLfloat r2, QVector3D P2, GLfloat r1, QVector3D P1, int detail, ModelType type)
{
    QVector<ModelVertex> vertices;
    QVector3D normal;

    // normal pointing from origin point to end point
    normal = P2 - P1;

    // create two perpendicular vectors - perp and q
    QVector3D perp = normal;
    if ((normal.x() == 0) && (normal.z() == 0)) {
        perp.setX(perp.x() + 1);
    } else {
        perp.setY(perp.y() + 1);
    }

    // cross product
    QVector3D q = QVector3D::crossProduct(perp, normal);
    perp = QVector3D::crossProduct(normal, q);

    // normalize vectors
    perp.normalize();
    q.normalize();

    // calculate vertices
    GLfloat twoPi = 2 * M_PI;
    for (int i = 0; i < detail; ++i)
    {
        GLfloat theta1 = (GLfloat)i / (GLfloat)detail * twoPi; // go around circle and get points
        GLfloat theta2 = (GLfloat)(i+1) / (GLfloat)detail * twoPi;
        ModelVertex vertex[6];

        QVector3D upVector(0,0,1);
        QVector3D downVector(0,0,-1);
        QVector3D resultVector;

        // normals
        normal.setX(qCos(theta1) * perp.x() + qSin(theta1) * q.x());
        normal.setY(qCos(theta1) * perp.y() + qSin(theta1) * q.y());
        normal.setZ(qCos(theta1) * perp.z() + qSin(theta1) * q.z());

        // top vertex
        vertex[0].position.x = P1.x() + r1 * normal.x();
        vertex[0].position.y = P1.y() + r1 * normal.y();
        vertex[0].position.z = P1.z() + r1 * normal.z();
        resultVector = (upVector + normal).normalized();
        vertex[0].normal.x = resultVector.x();
        vertex[0].normal.y = resultVector.y();
        vertex[0].normal.z = resultVector.z();


        // bottom vertex
        vertex[1].position.x = P2.x() + r2 * normal.x();
        vertex[1].position.y = P2.y() + r2 * normal.y();
        vertex[1].position.z = P2.z() + r2 * normal.z();
        resultVector = (downVector + normal).normalized();
        vertex[1].normal.x = resultVector.x();
        vertex[1].normal.y = resultVector.y();
        vertex[1].normal.z = resultVector.z();

        // normals
        normal.setX(qCos(theta2) * perp.x() + qSin(theta2) * q.x());
        normal.setY(qCos(theta2) * perp.y() + qSin(theta2) * q.y());
        normal.setZ(qCos(theta2) * perp.z() + qSin(theta2) * q.z());

        vertex[2].position.x = P2.x() + r2 * normal.x();
        vertex[2].position.y = P2.y() + r2 * normal.y();
        vertex[2].position.z = P2.z() + r2 * normal.z();
        resultVector = (downVector + normal).normalized();
        vertex[2].normal.x = resultVector.x();
        vertex[2].normal.y = resultVector.y();
        vertex[2].normal.z = resultVector.z();

        vertex[3].position.x = P1.x() + r1 * normal.x();
        vertex[3].position.y = P1.y() + r1 * normal.y();
        vertex[3].position.z = P1.z() + r1 * normal.z();
        resultVector = (upVector + normal).normalized();
        vertex[3].normal.x = resultVector.x();
        vertex[3].normal.y = resultVector.y();
        vertex[3].normal.z = resultVector.z();

        if (r2 != 0.0)
        {
            vertex[5].position.x = P2.x();
            vertex[5].position.y = P2.y();
            vertex[5].position.z = P2.z();
            vertex[5].normal.x = downVector.x();
            vertex[5].normal.y = downVector.y();
            vertex[5].normal.z = downVector.z();

            vertices.append(vertex[5]);
            vertices.append(vertex[2]);
            vertices.append(vertex[1]);
        }

        if (r1 != 0.0)
        {
            vertex[4].position.x = P1.x();
            vertex[4].position.y = P1.y();
            vertex[4].position.z = P1.z();
            vertex[4].normal.x = upVector.x();
            vertex[4].normal.y = upVector.y();
            vertex[4].normal.z = upVector.z();

            vertices.append(vertex[4]);
            vertices.append(vertex[0]);
            vertices.append(vertex[3]);
        }

         // append vertex
        vertices.append(vertex[0]);
        vertices.append(vertex[1]);
        vertices.append(vertex[2]);

        vertices.append(vertex[0]);
        vertices.append(vertex[2]);
        vertices.append(vertex[3]);
    }

    initializeVertexBuffer(type, vertices);

    addDrawableList(type);
}
Example #16
void 
cloud_cb (const sensor_msgs::PointCloud2ConstPtr& input)
{
  // Container for original & filtered data
  pcl::PCLPointCloud2* cloud = new pcl::PCLPointCloud2; 
  pcl::PCLPointCloud2ConstPtr cloudPtr(cloud);
  // pcl::PCLPointCloud2 cloud_filtered;

  pcl::PCLPointCloud2::Ptr cloud_blob (new pcl::PCLPointCloud2);
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_filtered (new pcl::PointCloud<pcl::PointXYZ>), cloud_p (new pcl::PointCloud<pcl::PointXYZ>), cloud_f (new pcl::PointCloud<pcl::PointXYZ>);

  // Convert to PCL data type
  pcl_conversions::toPCL(*input, *cloud_blob);

  // Perform the actual filtering
  pcl::PCLPointCloud2::Ptr cloud_filtered_blob (new pcl::PCLPointCloud2);
  pcl::VoxelGrid<pcl::PCLPointCloud2> sor;
  sor.setInputCloud (cloud_blob);
  sor.setLeafSize (0.05, 0.05, 0.05);
  sor.setFilterFieldName("z");
  sor.setFilterLimits(0.01, 0.3);
  sor.filter (*cloud_filtered_blob);

  // // Remove outlier X
  // pcl::PCLPointCloud2::Ptr cloud_filtered_blobx (new pcl::PCLPointCloud2);
  // pcl::VoxelGrid<pcl::PCLPointCloud2> sorx;
  // sorx.setInputCloud(cloud_filtered_blobz);
  // sorx.setFilterFieldName("x");
  // sorx.setFilterLimits(-1, 1);
  // sorx.filter(*cloud_filtered_blobx);

  // // Remove outlier Y
  // pcl::PCLPointCloud2::Ptr cloud_filtered_blob (new pcl::PCLPointCloud2);
  // pcl::VoxelGrid<pcl::PCLPointCloud2> sory;
  // sory.setInputCloud(cloud_filtered_blobx);
  // sory.setFilterFieldName("y");
  // sory.setFilterLimits(-1, 1);
  // sory.filter(*cloud_filtered_blob);

  // Convert to the templated PointCloud
  pcl::fromPCLPointCloud2 (*cloud_filtered_blob, *cloud_filtered);

  pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients ());
  pcl::PointIndices::Ptr inliers (new pcl::PointIndices ());
  // Create the segmentation object
  pcl::SACSegmentation<pcl::PointXYZ> seg;
  // Optional
  seg.setOptimizeCoefficients (true);
  // Mandatory
  seg.setModelType (pcl::SACMODEL_PLANE);
  seg.setMethodType (pcl::SAC_RANSAC);
  Eigen::Vector3f upVector(0, 0, 1);
  seg.setAxis(upVector);
  seg.setEpsAngle(1.5708);
  seg.setMaxIterations (1000);
  seg.setDistanceThreshold (0.05);
  seg.setInputCloud (cloud_filtered);
  seg.segment (*inliers, *coefficients);
  if (inliers->indices.size () == 0)
  {
    std::cerr << "Could not estimate a planar model for the given dataset." << std::endl;
    return;
  }

  // Create the filtering object
  pcl::ExtractIndices<pcl::PointXYZ> extract;
  // Extract the inliers
  extract.setInputCloud (cloud_filtered);
  extract.setIndices (inliers);
  extract.setNegative (false);
  extract.filter (*cloud_p);

  // Publish inliers
  // sensor_msgs::PointCloud2 inlierpc;
  // pcl_conversions::fromPCL(cloud_p, inlierpc);
  pub.publish (*cloud_p);

  // Publish the model coefficients
  pcl_msgs::ModelCoefficients ros_coefficients;
  pcl_conversions::fromPCL(*coefficients, ros_coefficients);
  pubCoef.publish (ros_coefficients);

    // ==========================================

  // // Convert to ROS data type
  // sensor_msgs::PointCloud2 downpc;
  // pcl_conversions::fromPCL(cloud_filtered, downpc);

  // // Publish the data
  // pub.publish (downpc);


  // pcl::ModelCoefficients coefficients;
  // pcl::PointIndices inliers;

  // //.makeShared()
  // // Create the segmentation object
  // pcl::SACSegmentation<pcl::PointXYZ> seg;
  // seg.setInputCloud (&cloudPtr);
  // seg.setOptimizeCoefficients (true); // Optional
  // seg.setModelType (pcl::SACMODEL_PLANE); // Mandatory
  // seg.setMethodType (pcl::SAC_RANSAC);
  // seg.setDistanceThreshold (0.1);

  // seg.segment (inliers, coefficients);

  // if (inliers.indices.size () == 0)
  // {
  //   PCL_ERROR ("Could not estimate a planar model for the given dataset.");
  //   return;
  // }

  // std::cerr << "Model coefficients: " << coefficients.values[0] << " " 
  //                                     << coefficients.values[1] << " "
  //                                     << coefficients.values[2] << " " 
  //                                     << coefficients.values[3] << std::endl;

  // // Publish the model coefficients
  // pcl_msgs::ModelCoefficients ros_coefficients;
  // pcl_conversions::fromPCL(coefficients, ros_coefficients);
  // pubCoef.publish (ros_coefficients);
}
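A hedged sketch of the node setup that would drive cloud_cb(); the node and topic names are assumptions, pub and pubCoef are the globals the callback publishes on, and <pcl_ros/point_cloud.h> is assumed to be included so the templated cloud can be published directly:
ros::Publisher pub;      // assumed global used by cloud_cb above
ros::Publisher pubCoef;  // assumed global used by cloud_cb above

int main (int argc, char** argv)
{
  ros::init (argc, argv, "plane_segmentation");
  ros::NodeHandle nh;
  ros::Subscriber sub = nh.subscribe ("input", 1, cloud_cb);
  pub = nh.advertise<pcl::PointCloud<pcl::PointXYZ> > ("plane_inliers", 1);
  pubCoef = nh.advertise<pcl_msgs::ModelCoefficients> ("plane_coefficients", 1);
  ros::spin ();
}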
Example #17
		void GLSpriteRenderer::Render() {
			SPADES_MARK_FUNCTION();
			lastImage = NULL;
			program->Use();
			
			projectionViewMatrix(program);
			rightVector(program);
			upVector(program);
			texture(program);
			viewMatrix(program);
			fogDistance(program);
			fogColor(program);
			
			positionAttribute(program);
			spritePosAttribute(program);
			colorAttribute(program);
			
			projectionViewMatrix.SetValue(renderer->GetProjectionViewMatrix());
			viewMatrix.SetValue(renderer->GetViewMatrix());
			
			fogDistance.SetValue(renderer->GetFogDistance());
			
			Vector3 fogCol = renderer->GetFogColor();
			fogColor.SetValue(fogCol.x,fogCol.y,fogCol.z);
			
			const client::SceneDefinition& def = renderer->GetSceneDef();
			rightVector.SetValue(def.viewAxis[0].x,
								 def.viewAxis[0].y,
								 def.viewAxis[0].z);
			upVector.SetValue(def.viewAxis[1].x,
							  def.viewAxis[1].y,
							  def.viewAxis[1].z);
			texture.SetValue(0);
			
			device->ActiveTexture(0);
			
			device->EnableVertexAttribArray(positionAttribute(), true);
			device->EnableVertexAttribArray(spritePosAttribute(), true);
			device->EnableVertexAttribArray(colorAttribute(), true);
			
			
			for(size_t i = 0; i < sprites.size(); i++){
				Sprite& spr = sprites[i];
				if(spr.image != lastImage){
					Flush();
					lastImage = spr.image;
					SPAssert(vertices.empty());
				}
				
				Vertex v;
				v.x = spr.center.x;
				v.y = spr.center.y;
				v.z = spr.center.z;
				v.radius = spr.radius;
				v.angle = spr.angle;
				v.r = spr.color.x;
				v.g = spr.color.y;
				v.b = spr.color.z;
				v.a = spr.color.w;
				
				uint32_t idx = (uint32_t)vertices.size();
				v.sx = -1; v.sy = -1;
				vertices.push_back(v);
				v.sx = 1; v.sy = -1;
				vertices.push_back(v);
				v.sx = -1; v.sy = 1;
				vertices.push_back(v);
				v.sx = 1; v.sy = 1;
				vertices.push_back(v);
				
				indices.push_back(idx);
				indices.push_back(idx + 1);
				indices.push_back(idx + 2);
				indices.push_back(idx + 1);
				indices.push_back(idx + 3);
				indices.push_back(idx + 2);
			}
		
			Flush();
			
			device->EnableVertexAttribArray(positionAttribute(), false);
			device->EnableVertexAttribArray(spritePosAttribute(), false);
			device->EnableVertexAttribArray(colorAttribute(), false);
		}
Example #18
void CTornado::UpdateFlow()
{
	IVehicleSystem* pVehicleSystem = g_pGame->GetIGameFramework()->GetIVehicleSystem();
	assert(pVehicleSystem);

	float frameTime(gEnv->pTimer->GetFrameTime());

	IPhysicalWorld *ppWorld = gEnv->pPhysicalWorld;

	Vec3 pos(GetEntity()->GetWorldPos());

	//first, check the entities in range
	m_nextEntitiesCheck -= frameTime;
	if (m_nextEntitiesCheck<0.0f)
	{
		m_nextEntitiesCheck = 1.0f;
		
		Vec3 radiusVec(m_radius,m_radius,0);
		
		IPhysicalEntity **ppList = NULL;

		int	numEnts = ppWorld->GetEntitiesInBox(pos-radiusVec,pos+radiusVec+Vec3(0,0,m_cloudHeight*0.5f),ppList,ent_sleeping_rigid|ent_rigid|ent_living);

		m_spinningEnts.clear();
		for (int i=0;i<numEnts;++i)
		{
			// add check for spectating players...
			EntityId id = ppWorld->GetPhysicalEntityId(ppList[i]);
			CActor* pActor = static_cast<CActor*>(g_pGame->GetIGameFramework()->GetIActorSystem()->GetActor(id));
			if(!pActor || !pActor->GetSpectatorMode())
			{
				m_spinningEnts.push_back(id);
			}
		}
		//OutputDistance();
	}

	//mess entities around
	for (size_t i=0;i<m_spinningEnts.size();++i)
	{
		IPhysicalEntity *ppEnt = ppWorld->GetPhysicalEntityById(m_spinningEnts[i]);
		if (ppEnt)
		{
			pe_status_pos spos;
			pe_status_dynamics sdyn;

			if (!ppEnt->GetStatus(&spos) || !ppEnt->GetStatus(&sdyn))
				continue;
		
			//gEnv->pRenderer->GetIRenderAuxGeom()->DrawSphere(spos.pos,2.0f,ColorB(255,0,255,255));
						
			Vec3 delta(pos - spos.pos);
			delta.z = 0.0f;

			float dLen(delta.len());
			float forceMult(max(0.0f,(m_radius-dLen)/m_radius));

			if (dLen>0.001f)
				delta /= dLen;
			else
				delta.zero();

			Vec3 upVector(0,0,1);

			float spinImpulse(m_spinImpulse);
			float attractionImpulse(m_attractionImpulse);
			float upImpulse(m_upImpulse);

			if (ppEnt->GetType() == PE_LIVING)
			{
				upImpulse *= 0.75f;
				attractionImpulse *= 0.35f;
				spinImpulse *= 1.5f;
			}

			
			if (IVehicle* pVehicle = pVehicleSystem->GetVehicle(m_spinningEnts[i]))
			{
				IVehicleMovement* pMovement = pVehicle->GetMovement();

				if (pMovement && pMovement->GetMovementType() == IVehicleMovement::eVMT_Air)
				{
					SVehicleMovementEventParams params;
					params.fValue = forceMult;
					pMovement->OnEvent(IVehicleMovement::eVME_Turbulence, params);
				}
			}

			Vec3 spinForce( (delta % upVector) * spinImpulse );
			Vec3 attractionForce(delta * attractionImpulse);
			Vec3 upForce(0,0,upImpulse);

			pe_action_impulse aimpulse;

			aimpulse.impulse = (spinForce + attractionForce + upForce) * (forceMult * sdyn.mass * frameTime);
			aimpulse.angImpulse = (upVector + (delta % upVector)) * (gf_PI * 0.33f * forceMult * sdyn.mass * frameTime);

			aimpulse.iApplyTime = 0;
			ppEnt->Action(&aimpulse);

			//gEnv->pRenderer->GetIRenderAuxGeom()->DrawLine(spos.pos,ColorB(255,0,255,255),spos.pos+aimpulse.impulse.GetNormalizedSafe(ZERO),ColorB(255,0,255,255));
		}
	}
}
Example #19
//! OnAnimate() is called just before rendering the whole scene.
//! Nodes may calculate or store animations here, and may do other useful things,
//! depending on what they are.
void CSceneNodeAnimatorCameraMaya::animateNode(ISceneNode *node, u32 timeMs)
{
	//Alt + LM = Rotate around camera pivot
	//Alt + LM + MM = Dolly forth/back in view direction (speed % distance camera pivot - max distance to pivot)
	//Alt + MM = Move on camera plane (Screen center is about the mouse pointer, depending on move speed)

	if (!node || node->getType() != ESNT_CAMERA)
		return;

	ICameraSceneNode* camera = static_cast<ICameraSceneNode*>(node);

	// Only process this camera if it is receiving input and is the active camera.
	if(!camera->isInputReceiverEnabled())
		return;

	scene::ISceneManager * smgr = camera->getSceneManager();
	if(smgr && smgr->getActiveCamera() != camera)
		return;

	if (OldCamera != camera)
	{
		OldTarget = camera->getTarget();
		OldCamera = camera;
		LastCameraTarget = OldTarget;
	}
	else
	{
		OldTarget += camera->getTarget() - LastCameraTarget;
	}

	core::vector3df target = camera->getTarget();

	f32 nRotX = RotX;
	f32 nRotY = RotY;
	f32 nZoom = CurrentZoom;

	if ( (isMouseKeyDown(0) && isMouseKeyDown(2)) || isMouseKeyDown(1) )
	{
		if (!Zooming)
		{
			ZoomStart = MousePos;
			Zooming = true;
			nZoom = CurrentZoom;
		}
		else
		{
			const f32 targetMinDistance = 0.1f;
			nZoom += (ZoomStart.X - MousePos.X) * ZoomSpeed;

			if (nZoom < targetMinDistance) // jox: fixed bug: bounce back when zooming too close
				nZoom = targetMinDistance;
		}
	}
	else if (Zooming)
	{
		const f32 old = CurrentZoom;
		CurrentZoom = CurrentZoom + (ZoomStart.X - MousePos.X ) * ZoomSpeed;
		nZoom = CurrentZoom;

		if (nZoom < 0)
			nZoom = CurrentZoom = old;
		Zooming = false;
	}

	// Translation ---------------------------------

	core::vector3df translate(OldTarget), upVector(camera->getUpVector());

	core::vector3df tvectX = Pos - target;
	tvectX = tvectX.crossProduct(upVector);
	tvectX.normalize();

	const SViewFrustum* const va = camera->getViewFrustum();
	core::vector3df tvectY = (va->getFarLeftDown() - va->getFarRightDown());
	tvectY = tvectY.crossProduct(upVector.Y > 0 ? Pos - target : target - Pos);
	tvectY.normalize();

	if (isMouseKeyDown(2) && !Zooming)
	{
		if (!Translating)
		{
			TranslateStart = MousePos;
			Translating = true;
		}
		else
		{
			translate +=  tvectX * (TranslateStart.X - MousePos.X)*TranslateSpeed +
			              tvectY * (TranslateStart.Y - MousePos.Y)*TranslateSpeed;
		}
	}
	else if (Translating)
	{
		translate += tvectX * (TranslateStart.X - MousePos.X)*TranslateSpeed +
		             tvectY * (TranslateStart.Y - MousePos.Y)*TranslateSpeed;
		OldTarget = translate;
		Translating = false;
	}

	// Rotation ------------------------------------

	if (isMouseKeyDown(0) && !Zooming)
	{
		if (!Rotating)
		{
			RotateStart = MousePos;
			Rotating = true;
			nRotX = RotX;
			nRotY = RotY;
		}
		else
		{
			nRotX += (RotateStart.X - MousePos.X) * RotateSpeed;
			nRotY += (RotateStart.Y - MousePos.Y) * RotateSpeed;
		}
	}
	else if (Rotating)
	{
		RotX += (RotateStart.X - MousePos.X) * RotateSpeed;
		RotY += (RotateStart.Y - MousePos.Y) * RotateSpeed;
		nRotX = RotX;
		nRotY = RotY;
		Rotating = false;
	}

	// Set Pos ------------------------------------

	target = translate;

	Pos.X = nZoom + target.X;
	Pos.Y = target.Y;
	Pos.Z = target.Z;

	Pos.rotateXYBy(nRotY, target);
	Pos.rotateXZBy(-nRotX, target);

	// Rotation Error ----------------------------

	// jox: fixed bug: jitter when rotating to the top and bottom of y
	upVector.set(0,1,0);
	upVector.rotateXYBy(-nRotY);
	upVector.rotateXZBy(-nRotX+180.f);

	camera->setPosition(Pos);
	camera->setTarget(target);
	camera->setUpVector(upVector);
	LastCameraTarget = camera->getTarget();
}
Example #20
//--------------------------------------------------------------
void ofApp::setup(){
    if( !ofFile::doesFileExist("11to16.bin") ){
        ofSystemAlertDialog("Make sure you have 11to16.bin, xTable.bin and zTable.bin in your data folder!");
        ofExit();
    }
    ofBackground(0, 0, 0);
    
    // kinect
    kinectIsReady = false;
    kinect.open();
    kinectWidth = 512;
    kinectHeight = 424;
    kinectDepth = (int)kinect.maxDistance.getMax();
    
    // listener
    reset.addListener(this, &ofApp::resetPressed);
    enableSmoothLighting.addListener(this, &ofApp::enableSmoothLightingChanged);
    enableScanPeople.addListener(this, &ofApp::enableScanPeopleChanged);
    saveReferenceDepthPixels.addListener(this, &ofApp::saveReferenceDepthPixelsPressed);
    ofAddListener(ofxSimpleTimer::TIMER_COMPLETE, this, &ofApp::timerComplete);
    
    // gui
    showPanel = true;
    panel.setup("distance in mm", "settings.xml", 0, 0);
    // - kinect
    panel.add(kinect.minDistance);
    panel.add(kinect.maxDistance);
    panel.add(step.set("step", 5, 3, 30));
    panel.add(stopUpdatingKinectBullet.set("stopUpdatingKinectBullet", false));
    // - debug
    panel.add(enableDrawDebug.set("enableDrawDebug", true));
    panel.add(enableDrawKinectWireFrame.set("enableDrawKinectWireFrame", true));
    panel.add(enableDrawAssimpModelWireFrame.set("enableDrawAssimpModelWireFrame", false));
    panel.add(hideKinectMesh.set("hideKinectMesh", false));
    panel.add(enableDrawGuideLine.set("enableDrawGuideLine", false));
    panel.add(enableMouseInput.set("enableMouseInput", true));
    panel.add(enableDrawDebugSpheres.set("enableDrawDebugSpheres", false));
    panel.add(enableScanPeople.set("enableScanPeople", false));
    panel.add(saveReferenceDepthPixels.setup("saveReferenceDepth"));
    panel.add(probabilityFactor.set("probabilityFactor", kinect.maxDistance, 1*PROBABILITY_FACTOR_MIN_FACTOR, kinect.maxDistance.getMax()*PROBABILITY_FACTOR_MAX_FACTOR));
    panel.add(reset.setup("reset"));
    // - dmx
    panel.add(enableDmx.set("enableDmx", false));
    for (int i = 0; i < DMX_CHANNEL_NUMBER; i++) {
        panel.add(dmxChannels[i].set("DMX Channel "+ofToString(i+1), 127, 0, 255));
    }
    // - light
    panel.add(lightSpecularColor.set("lightSpecularColor", ofFloatColor::red, ofFloatColor::black, ofFloatColor::white));
    panel.add(lightDissuseColor.set("lightDiffuseColor", ofFloatColor::green, ofFloatColor::black, ofFloatColor::white));
    panel.add(lightAmbientColor.set("lightAmbientColor", ofFloatColor::blue, ofFloatColor::black, ofFloatColor::white));
    panel.add(lightAttenuation.set("lightAttenuation", ofVec3f(1.0, 0.0, 0.0), ofVec3f(0.0, 0.0, 0.0), ofVec3f(5.0, 0.01, 0.0001)));
    panel.add(enableSmoothLighting.set("enableSmoothLighting", true));
    panel.add(enableSeparateSpecularLight.set("enableSeparateSpecularLight", false));
    panel.add(lightPosition.set("lightPosition", ofVec3f(kinectWidth/2.0, kinectHeight/2.0, kinect.minDistance/2.0), ofVec3f(0, 0, -kinectDepth), ofVec3f(kinectWidth, kinectHeight, kinectDepth)));
    // - material
    panel.add(materialSpecularColor.set("materialSpecularColor", ofFloatColor::red, ofFloatColor::black, ofFloatColor::white));
    panel.add(materialDiffuseColor.set("materialDiffuseColor", ofFloatColor::green, ofFloatColor::black, ofFloatColor::white));
    panel.add(materialAmbientColor.set("materialAmbientColor", ofFloatColor::blue, ofFloatColor::black, ofFloatColor::white));
    panel.add(materialEmissiveColor.set("materialEmissiveColor", ofFloatColor::black, ofFloatColor::black, ofFloatColor::white));
    panel.add(materialShininess.set("materialShininess", 64, 0, 128));
    // camera
    panel.add(cameraFov.set("cameraFov", 60, 1, 180));
    panel.add(cameraNearDist.set("cameraNearDist", 6.65107, 0, 100));
    panel.add(cameraFarDist.set("cameraFarDist", 6651.07, 0, kinectDepth));
    panel.add(cameraPosition.set("cameraPosition", ofVec3f(kinectWidth/2.0, kinectHeight/2.0, 0), ofVec3f(0, 0, -kinectDepth), ofVec3f(kinectWidth, kinectHeight, kinectDepth)));
    panel.add(cameraLookAt.set("cameraLookAt", ofVec3f(kinectWidth/2.0, kinectHeight/2.0, kinect.minDistance), ofVec3f(0, 0, -kinectDepth), ofVec3f(kinectWidth, kinectHeight, kinectDepth)));
    // world
    panel.add(modelStartPosition.set("modelStartPosition", ofVec3f(cameraPosition), cameraPosition.getMin(), cameraPosition.getMax()));
    panel.add(worldGravity.set("worldGravity", ofVec3f(0, 0, 15.0), ofVec3f(-100, -100, -100), ofVec3f(30, 30, 30)));
    panel.add(modelMass.set("modelMass", 0.000005, 0.000005, 1)); // 1 is 1 kg
    panel.add(enableAddModel.set("enableAddModel", false));
    panel.add(enableAddModelRandom.set("enableAddModelRandom", false));
    // load saved settings data
    panel.loadFromFile("settings.xml");
    // minimize all guis
    panel.minimizeAll();
    
    // dmx
    dmx.connect("tty.usbserial-ENY46L4I"); // use the name
    //dmx.connect(0); // or use a number
    
    // camera
    camera.setAutoDistance(false);
    if (!enableMouseInput) camera.disableMouseInput();
    camera.setPosition(cameraPosition);
    ofVec3f upVector(0, -1, 0);
    camera.lookAt(ofVec3f(cameraLookAt), upVector);
    
    // bullet
    world.setup();
    world.enableGrabbing();
    world.setCamera(&camera);
    world.setGravity(worldGravity);
    
    // model
    // assimpModelLoaders
    // - sakura
    assimpModelLoaders[0].loadModel("models/sakura/sakura2/sakura2.3ds", true);
    assimpModelLoaders[0].setPosition(ofGetWidth()/2, ofGetHeight()/2, 0);
    assimpModelLoaders[0].setScale(1.0, 1.0, 1.0);
    // - bitcoin
    assimpModelLoaders[1].loadModel("models/bitcoin/bitcoin4/bitcoin_v02.3ds", true);
    assimpModelLoaders[1].setPosition(ofGetWidth()/2, ofGetHeight()/2, 0);
    assimpModelLoaders[1].setScale(1.0, 1.0, 1.0);
    // - dna
    assimpModelLoaders[2].loadModel("models/dna/dna6/dna_low_green.3ds", true);
    assimpModelLoaders[2].setPosition(ofGetWidth()/2, ofGetHeight()/2, 0);
    assimpModelLoaders[2].setScale(2.0, 2.0, 2.0);
    // - dgcoin
    assimpModelLoaders[3].loadModel("models/bitcoin/bitcoin6/DGbitcoin_low_blue_0_180_235.3ds", true);
    assimpModelLoaders[3].setPosition(ofGetWidth()/2, ofGetHeight()/2, 0);
    assimpModelLoaders[3].setScale(2.0, 2.0, 2.0);
    // - maple
    assimpModelLoaders[4].loadModel("models/maple/maple1/maple_orange.3ds", true);
    assimpModelLoaders[4].setPosition(ofGetWidth()/2, ofGetHeight()/2, 0);
    assimpModelLoaders[4].setScale(0.65, 0.65, 0.65);
    
    // modelSetVector
    modelSetVector.resize(4);
    // - sakura
    modelSetVector[0].push_back(0);
    // - bitcoin & dgcoin
    modelSetVector[1].push_back(1);
    modelSetVector[1].push_back(3);
    // - dna
    modelSetVector[2].push_back(2);
    // - maple
    modelSetVector[3].push_back(4);
    
    // referenceAssimpModelBulletShapes
    ofQuaternion startRot = ofQuaternion(1., 0., 0., PI);
    for (int i = 0; i < MODEL_NUMBER; i++) {
        for (int j = 0; j < assimpModelLoaders[i].getNumMeshes(); j++) {
            referenceAssimpModelBulletShapes[i] = new ofxBulletCustomShape;
            referenceAssimpModelBulletShapes[i]->addMesh(assimpModelLoaders[i].getMesh(j), assimpModelLoaders[i].getScale(), true);
            ofVec3f startLoc = ofVec3f( ofRandom(-5, 5), ofRandom(0, -10), ofRandom(-5, 5) );
            referenceAssimpModelBulletShapes[i]->create(world.world, startLoc, startRot, 3.);
            referenceAssimpModelBulletShapes[i]->add();
        }
    }
    currentModelSetId = 0;
    
    // light
    light.setSpecularColor(lightSpecularColor);
    light.setDiffuseColor(lightDissuseColor);
    light.setAmbientColor(lightAmbientColor);
    light.setAttenuation(lightAttenuation->x, lightAttenuation->y, lightAttenuation->z);
    ofSetSmoothLighting(enableSmoothLighting);
    light.setPosition(lightPosition);
    light.setPointLight();
    
    // timer
    timer.setName("play");
    timer.setTime(MODEL_PLAY_SECONDS*1000, 1);
    timer.start();
    
    // debug
    ofSetVerticalSync(false);
    ofSetFrameRate(0);
    // - camera target
    debugSphereCameraTarget.set(10, 3);
    // - debug spheres
    float debugSphereRadius = 5;
    int debugSphereResolution = 3;
    float samplingNumber = 100;
    ofVec3f debugSphereNumber(kinectWidth/samplingNumber, kinectHeight/samplingNumber, kinectDepth/samplingNumber);
    ofVec3f gapBetweenSpheres(kinectWidth/debugSphereNumber.x, kinectHeight/debugSphereNumber.y, kinectDepth/debugSphereNumber.z);
    for (int x = 0; x < debugSphereNumber.x; x++) {
        for (int y = 0; y < debugSphereNumber.y; y++) {
            for (int z = 0; z < debugSphereNumber.z; z++) {
                ofSpherePrimitive instantSphere;
                instantSphere.set(debugSphereRadius, debugSphereResolution);
                instantSphere.setPosition(x*gapBetweenSpheres.x, y*gapBetweenSpheres.y, z*gapBetweenSpheres.z);
                debugSpheres.push_back(instantSphere);
            }
        }
    }
}
Example #21
QQuaternion QCamera::tiltRotation(float angle) const
{
    QVector3D viewVector = viewCenter() - position();
    QVector3D xBasis = QVector3D::crossProduct(upVector(), viewVector.normalized()).normalized();
    return QQuaternion::fromAxisAndAngle( xBasis, -angle );
}
Example #22
float buildPolygonExtrusion(const Polygon& polygon,
    double minHeight,
    double height,
    std::vector<PolygonVertex>& outVertices,
    std::vector<unsigned int>& outIndices,
    const std::unique_ptr<HeightData>& elevation,
    float inverseTileScale)
{
    int vertexDataOffset = outVertices.size();
    glm::vec3 upVector(0.0f, 0.0f, 1.0f);
    glm::vec3 normalVector;
    float minz = 0.f;
    float cz = 0.f;

    // Compute min and max height of the polygon
    if (elevation) {
        // The polygon centroid height
        cz = sampleElevation(centroid(polygon), elevation);
        minz = std::numeric_limits<float>::max();

        for (auto& line : polygon) {
            for (size_t i = 0; i < line.size(); i++) {
                glm::vec3 p(line[i]);

                float pz = sampleElevation(glm::vec2(p.x, p.y), elevation);

                minz = std::min(minz, pz);
            }
        }
    }

    for (auto& line : polygon) {
        size_t lineSize = line.size();

        outVertices.reserve(outVertices.size() + lineSize * 4);
        outIndices.reserve(outIndices.size() + lineSize * 6);

        for (size_t i = 0; i < lineSize - 1; i++) {
            glm::vec3 a(line[i]);
            glm::vec3 b(line[i+1]);

            if (a == b) { continue; }

            normalVector = glm::cross(upVector, b - a);
            normalVector = glm::normalize(normalVector);

            a.z = height + cz * inverseTileScale;
            outVertices.push_back({a, normalVector});
            b.z = height + cz * inverseTileScale;
            outVertices.push_back({b, normalVector});
            a.z = minHeight + minz * inverseTileScale;
            outVertices.push_back({a, normalVector});
            b.z = minHeight + minz * inverseTileScale;
            outVertices.push_back({b, normalVector});

            outIndices.push_back(vertexDataOffset+0);
            outIndices.push_back(vertexDataOffset+1);
            outIndices.push_back(vertexDataOffset+2);
            outIndices.push_back(vertexDataOffset+1);
            outIndices.push_back(vertexDataOffset+3);
            outIndices.push_back(vertexDataOffset+2);

            vertexDataOffset += 4;
        }
    }

    return cz;
}
Example #23
void Light::shadowBegin()
{
#ifndef ARCH_PSP
#ifndef OPENGL_ES
#ifndef ARCH_DC
	GLShader::clear();
	glPushAttrib (GL_DEPTH_BUFFER_BIT|GL_VIEWPORT_BIT|GL_ENABLE_BIT);
	glBindFramebufferEXT (GL_FRAMEBUFFER_EXT, shadowBuffer);

	glViewport(0, 0, map_res, map_res);
	viewport[0]=0;
    viewport[1]=0;
    viewport[2]=map_res;
    viewport[3]=map_res;
		
	glClear(GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();

	if (type==LIGHT_AREA)
	{
//		gluPerspective(cutoff, 1.0, nearclip, farclip);
		glOrtho(-ortho_size/2.0, ortho_size/2.0, -ortho_size/2.0, ortho_size/2.0, nearclip, farclip);	
	}
	else if (type==LIGHT_SPOT)
	{
		gluPerspective(cutoff, 1.0, nearclip, farclip);
	}	
	
	glGetFloatv(GL_PROJECTION_MATRIX, projectionMatrix);
	
	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	


	Vector upVector(0,1,0);

	if (has_target)
	{
		if (sceneObjTarget)
		{
			target = sceneObjTarget->getPosition();
		}

		if (rotation.z && has_target)
		{
			glRotatef(-rotation.z,0,0,1);
			glPushMatrix();
		}		
		
		
		gluLookAt(position.x, position.y, position.z, target.x, target.y, target.z, upVector.x, upVector.y, upVector.z);
		
	}
	else
	{
		Vector i(0,0,1),f;
		
		matTransform(transMatrix,i,f);
				
		f.makeUnit();
		f*=-farclip;
		
		gluLookAt(position.x, position.y, position.z, position.x+f.x, position.y+f.y, position.z+f.z, upVector.x, upVector.y, upVector.z);
		
		if (parent)
		{
			parent->transformReverseBegin();
			glPushMatrix();
		}			
	}

	glGetFloatv(GL_MODELVIEW_MATRIX, viewMatrix);
	

#endif
#endif
#endif
};
Example #24
void MyGame3DDevice::InitSceneEffectShader()
{
    // load shader
    HRSRC hResource = ::FindResource( NULL, MAKEINTRESOURCE(IDR_PIXEL_SHADER2), RT_RCDATA );
    HGLOBAL hResourceData = ::LoadResource( NULL, hResource );
    LPVOID pData = ::LockResource( hResourceData );

    ID3DXBuffer* errorBuffer = 0;
    HR( D3DXCreateEffect( pD3D9InstanceDevice,
                          pData,
                          ::SizeofResource( NULL, hResource ),
                          0,
                          0,
                          0,
                          0,
                          &mFX,
                          &errorBuffer ) );
    if( errorBuffer )
    {
        MyGameMessage( (char*)errorBuffer->GetBufferPointer() );
        errorBuffer->Release();
        return;
    }

    //initialize matrix in shader

    D3DXMATRIX worldMatrix;

    D3DXMatrixTranslation( &worldMatrix, -512.0f, -384.0f, 0 );

    D3DXVECTOR3 position( 0.0f, 0.0f, 20.0f );
    D3DXVECTOR3 targetPoint( 0.0f, 0.0f ,0.0f );
    D3DXVECTOR3 upVector( 0.0f, -1.0f, 0.0f );

    D3DXMATRIX	viewMatrix;

    D3DXMatrixLookAtLH( &viewMatrix, &position, &targetPoint, &upVector );

    D3DXMATRIX  projMatrix;

    D3DXMatrixOrthoLH( &projMatrix, 1024.0f, 768.0f, -1009.0f, 5000.0f );

    D3DXMatrixTranslation( &uiMoveMatrix, 0.0f, 0.0f, 0 );

    mhUIWorldMatHandle	= mFX->GetParameterByName( 0, worldMatName );
    mhUIViewMatHandle	= mFX->GetParameterByName( 0, viewMatName );
    mhUIProjMatHandle	= mFX->GetParameterByName( 0, projMatName );
    mhUIMoveMatHandle	= mFX->GetParameterByName( 0, moveMatName );
    mhAlphaEnabled		= mFX->GetParameterByName( 0, "alphaEnable" );
    D3DXHANDLE mhTech	= mFX->GetTechniqueByName( techniqueName );

    mhTex = mFX->GetParameterByName( 0, "gTex" );

    mFX->SetMatrix( mhUIWorldMatHandle, &worldMatrix );
    mFX->SetMatrix( mhUIViewMatHandle, &viewMatrix );
    mFX->SetMatrix( mhUIProjMatHandle, &projMatrix );
    mFX->SetMatrix( mhUIMoveMatHandle, &uiMoveMatrix );

    mFX->SetTechnique( mhTech );

    HR(mFX->SetBool( mhAlphaEnabled, FALSE ));

    return;
}
Example #25
void MainController::run()
{
    while(!pangolin::ShouldQuit() && !((!logReader->hasMore()) && quiet) && !(eFusion->getTick() == end && quiet))
    {
        if(!gui->pause->Get() || pangolin::Pushed(*gui->step))
        {
            if((logReader->hasMore() || rewind) && eFusion->getTick() < end)
            {
                TICK("LogRead");
                if(rewind)
                {
                    if(!logReader->hasMore())
                    {
                        logReader->getBack();
                    }
                    else
                    {
                        logReader->getNext();
                    }

                    if(logReader->rewound())
                    {
                        logReader->currentFrame = 0;
                    }
                }
                else
                {
                    logReader->getNext();
                }
                TOCK("LogRead");

                if(eFusion->getTick() < start)
                {
                    eFusion->setTick(start);
                    logReader->fastForward(start);
                }

                float weightMultiplier = framesToSkip + 1;

                if(framesToSkip > 0)
                {
                    eFusion->setTick(eFusion->getTick() + framesToSkip);
                    logReader->fastForward(logReader->currentFrame + framesToSkip);
                    framesToSkip = 0;
                }

                Eigen::Matrix4f * currentPose = 0;

                if(groundTruthOdometry)
                {
                    currentPose = new Eigen::Matrix4f;
                    currentPose->setIdentity();
                    *currentPose = groundTruthOdometry->getIncrementalTransformation(logReader->timestamp);
                }

                eFusion->processFrame(logReader->rgb, logReader->depth, logReader->timestamp, currentPose, weightMultiplier);

                if(currentPose)
                {
                    delete currentPose;
                }

                if(frameskip && Stopwatch::getInstance().getTimings().at("Run") > 1000.f / 30.f)
                {
                    framesToSkip = int(Stopwatch::getInstance().getTimings().at("Run") / (1000.f / 30.f));
                }
            }
        }
        else
        {
            eFusion->predict();
        }

        TICK("GUI");

        if(gui->followPose->Get())
        {
            pangolin::OpenGlMatrix mv;

            Eigen::Matrix4f currPose = eFusion->getCurrPose();
            Eigen::Matrix3f currRot = currPose.topLeftCorner(3, 3);

            Eigen::Quaternionf currQuat(currRot);
            Eigen::Vector3f forwardVector(0, 0, 1);
            Eigen::Vector3f upVector(0, iclnuim ? 1 : -1, 0);

            Eigen::Vector3f forward = (currQuat * forwardVector).normalized();
            Eigen::Vector3f up = (currQuat * upVector).normalized();

            Eigen::Vector3f eye(currPose(0, 3), currPose(1, 3), currPose(2, 3));

            eye -= forward;

            Eigen::Vector3f at = eye + forward;

            Eigen::Vector3f z = (eye - at).normalized();  // Forward
            Eigen::Vector3f x = up.cross(z).normalized(); // Right
            Eigen::Vector3f y = z.cross(x);

            Eigen::Matrix4d m;
            m << x(0),  x(1),  x(2),  -(x.dot(eye)),
                 y(0),  y(1),  y(2),  -(y.dot(eye)),
                 z(0),  z(1),  z(2),  -(z.dot(eye)),
                    0,     0,     0,              1;

            memcpy(&mv.m[0], m.data(), sizeof(Eigen::Matrix4d));

            gui->s_cam.SetModelViewMatrix(mv);
        }

        gui->preCall();

        std::stringstream stri;
        stri << eFusion->getModelToModel().lastICPCount;
        gui->trackInliers->Ref().Set(stri.str());

        std::stringstream stre;
        stre << (isnan(eFusion->getModelToModel().lastICPError) ? 0 : eFusion->getModelToModel().lastICPError);
        gui->trackRes->Ref().Set(stre.str());

        if(!gui->pause->Get())
        {
            gui->resLog.Log((isnan(eFusion->getModelToModel().lastICPError) ? std::numeric_limits<float>::max() : eFusion->getModelToModel().lastICPError), icpErrThresh);
            gui->inLog.Log(eFusion->getModelToModel().lastICPCount, icpCountThresh);
        }

        Eigen::Matrix4f pose = eFusion->getCurrPose();

        if(gui->drawRawCloud->Get() || gui->drawFilteredCloud->Get())
        {
            eFusion->computeFeedbackBuffers();
        }

        if(gui->drawRawCloud->Get())
        {
            eFusion->getFeedbackBuffers().at(FeedbackBuffer::RAW)->render(gui->s_cam.GetProjectionModelViewMatrix(), pose, gui->drawNormals->Get(), gui->drawColors->Get());
        }

        if(gui->drawFilteredCloud->Get())
        {
            eFusion->getFeedbackBuffers().at(FeedbackBuffer::FILTERED)->render(gui->s_cam.GetProjectionModelViewMatrix(), pose, gui->drawNormals->Get(), gui->drawColors->Get());
        }

        if(gui->drawGlobalModel->Get())
        {
            glFinish();
            TICK("Global");

            if(gui->drawFxaa->Get())
            {
                gui->drawFXAA(gui->s_cam.GetProjectionModelViewMatrix(),
                              gui->s_cam.GetModelViewMatrix(),
                              eFusion->getGlobalModel().model(),
                              eFusion->getConfidenceThreshold(),
                              eFusion->getTick(),
                              eFusion->getTimeDelta(),
                              iclnuim);
            }
            else
            {
                eFusion->getGlobalModel().renderPointCloud(gui->s_cam.GetProjectionModelViewMatrix(),
                                                           eFusion->getConfidenceThreshold(),
                                                           gui->drawUnstable->Get(),
                                                           gui->drawNormals->Get(),
                                                           gui->drawColors->Get(),
                                                           gui->drawPoints->Get(),
                                                           gui->drawWindow->Get(),
                                                           gui->drawTimes->Get(),
                                                           eFusion->getTick(),
                                                           eFusion->getTimeDelta());
            }
            glFinish();
            TOCK("Global");
        }

        if(eFusion->getLost())
        {
            glColor3f(1, 1, 0);
        }
        else
        {
            glColor3f(1, 0, 1);
        }
        gui->drawFrustum(pose);
        glColor3f(1, 1, 1);

        if(gui->drawFerns->Get())
        {
            glColor3f(0, 0, 0);
            for(size_t i = 0; i < eFusion->getFerns().frames.size(); i++)
            {
                if((int)i == eFusion->getFerns().lastClosest)
                    continue;

                gui->drawFrustum(eFusion->getFerns().frames.at(i)->pose);
            }
            glColor3f(1, 1, 1);
        }

        if(gui->drawDefGraph->Get())
        {
            const std::vector<GraphNode*> & graph = eFusion->getLocalDeformation().getGraph();

            for(size_t i = 0; i < graph.size(); i++)
            {
                pangolin::glDrawCross(graph.at(i)->position(0),
                                      graph.at(i)->position(1),
                                      graph.at(i)->position(2),
                                      0.1);

                for(size_t j = 0; j < graph.at(i)->neighbours.size(); j++)
                {
                    pangolin::glDrawLine(graph.at(i)->position(0),
                                         graph.at(i)->position(1),
                                         graph.at(i)->position(2),
                                         graph.at(graph.at(i)->neighbours.at(j))->position(0),
                                         graph.at(graph.at(i)->neighbours.at(j))->position(1),
                                         graph.at(graph.at(i)->neighbours.at(j))->position(2));
                }
            }
        }

        if(eFusion->getFerns().lastClosest != -1)
        {
            glColor3f(1, 0, 0);
            gui->drawFrustum(eFusion->getFerns().frames.at(eFusion->getFerns().lastClosest)->pose);
            glColor3f(1, 1, 1);
        }

        const std::vector<PoseMatch> & poseMatches = eFusion->getPoseMatches();

        int maxDiff = 0;
        for(size_t i = 0; i < poseMatches.size(); i++)
        {
            if(poseMatches.at(i).secondId - poseMatches.at(i).firstId > maxDiff)
            {
                maxDiff = poseMatches.at(i).secondId - poseMatches.at(i).firstId;
            }
        }

        for(size_t i = 0; i < poseMatches.size(); i++)
        {
            if(gui->drawDeforms->Get())
            {
                if(poseMatches.at(i).fern)
                {
                    glColor3f(1, 0, 0);
                }
                else
                {
                    glColor3f(0, 1, 0);
                }
                for(size_t j = 0; j < poseMatches.at(i).constraints.size(); j++)
                {
                    pangolin::glDrawLine(poseMatches.at(i).constraints.at(j).sourcePoint(0), poseMatches.at(i).constraints.at(j).sourcePoint(1), poseMatches.at(i).constraints.at(j).sourcePoint(2),
                                         poseMatches.at(i).constraints.at(j).targetPoint(0), poseMatches.at(i).constraints.at(j).targetPoint(1), poseMatches.at(i).constraints.at(j).targetPoint(2));
                }
            }
        }
        glColor3f(1, 1, 1);

        eFusion->normaliseDepth(0.3f, gui->depthCutoff->Get());

        for(std::map<std::string, GPUTexture*>::const_iterator it = eFusion->getTextures().begin(); it != eFusion->getTextures().end(); ++it)
        {
            if(it->second->draw)
            {
                gui->displayImg(it->first, it->second);
            }
        }

        eFusion->getIndexMap().renderDepth(gui->depthCutoff->Get());

        gui->displayImg("ModelImg", eFusion->getIndexMap().imageTex());
        gui->displayImg("Model", eFusion->getIndexMap().drawTex());

        std::stringstream strs;
        strs << eFusion->getGlobalModel().lastCount();

        gui->totalPoints->operator=(strs.str());

        std::stringstream strs2;
        strs2 << eFusion->getLocalDeformation().getGraph().size();

        gui->totalNodes->operator=(strs2.str());

        std::stringstream strs3;
        strs3 << eFusion->getFerns().frames.size();

        gui->totalFerns->operator=(strs3.str());

        std::stringstream strs4;
        strs4 << eFusion->getDeforms();

        gui->totalDefs->operator=(strs4.str());

        std::stringstream strs5;
        strs5 << eFusion->getTick() << "/" << logReader->getNumFrames();

        gui->logProgress->operator=(strs5.str());

        std::stringstream strs6;
        strs6 << eFusion->getFernDeforms();

        gui->totalFernDefs->operator=(strs6.str());

        gui->postCall();

        logReader->flipColors = gui->flipColors->Get();
        eFusion->setRgbOnly(gui->rgbOnly->Get());
        eFusion->setPyramid(gui->pyramid->Get());
        eFusion->setFastOdom(gui->fastOdom->Get());
        eFusion->setConfidenceThreshold(gui->confidenceThreshold->Get());
        eFusion->setDepthCutoff(gui->depthCutoff->Get());
        eFusion->setIcpWeight(gui->icpWeight->Get());
        eFusion->setSo3(gui->so3->Get());
        eFusion->setFrameToFrameRGB(gui->frameToFrameRGB->Get());

        resetButton = pangolin::Pushed(*gui->reset);

        if(gui->autoSettings)
        {
            static bool last = gui->autoSettings->Get();

            if(gui->autoSettings->Get() != last)
            {
                last = gui->autoSettings->Get();
                static_cast<LiveLogReader *>(logReader)->setAuto(last);
            }
        }

        Stopwatch::getInstance().sendAll();

        if(resetButton)
        {
            break;
        }

        if(pangolin::Pushed(*gui->save))
        {
            eFusion->savePly();
        }

        TOCK("GUI");
    }
}
Example #26
QQuaternion QCamera::panRotation(float angle) const
{
    return QQuaternion::fromAxisAndAngle(upVector(), angle);
}
Example #27
		void GLSoftSpriteRenderer::Render() {
			SPADES_MARK_FUNCTION();
			lastImage = NULL;
			program->Use();

			device->Enable(IGLDevice::Blend, true);
			device->BlendFunc(IGLDevice::One, IGLDevice::OneMinusSrcAlpha);

			projectionViewMatrix(program);
			rightVector(program);
			frontVector(program);
			viewOriginVector(program);
			upVector(program);
			texture(program);
			depthTexture(program);
			viewMatrix(program);
			fogDistance(program);
			fogColor(program);
			zNearFar(program);

			positionAttribute(program);
			spritePosAttribute(program);
			colorAttribute(program);

			projectionViewMatrix.SetValue(renderer->GetProjectionViewMatrix());
			viewMatrix.SetValue(renderer->GetViewMatrix());

			fogDistance.SetValue(renderer->GetFogDistance());

			Vector3 fogCol = renderer->GetFogColor();
			fogCol *= fogCol; // linearize
			fogColor.SetValue(fogCol.x, fogCol.y, fogCol.z);

			const client::SceneDefinition &def = renderer->GetSceneDef();
			rightVector.SetValue(def.viewAxis[0].x, def.viewAxis[0].y, def.viewAxis[0].z);
			upVector.SetValue(def.viewAxis[1].x, def.viewAxis[1].y, def.viewAxis[1].z);
			frontVector.SetValue(def.viewAxis[2].x, def.viewAxis[2].y, def.viewAxis[2].z);

			viewOriginVector.SetValue(def.viewOrigin.x, def.viewOrigin.y, def.viewOrigin.z);
			texture.SetValue(0);
			depthTexture.SetValue(1);
			zNearFar.SetValue(def.zNear, def.zFar);

			device->ActiveTexture(1);
			device->BindTexture(IGLDevice::Texture2D,
			                    renderer->GetFramebufferManager()->GetDepthTexture());
			device->ActiveTexture(0);

			device->EnableVertexAttribArray(positionAttribute(), true);
			device->EnableVertexAttribArray(spritePosAttribute(), true);
			device->EnableVertexAttribArray(colorAttribute(), true);

			thresLow = tanf(def.fovX * .5f) * tanf(def.fovY * .5f) * 1.8f;
			thresRange = thresLow * .5f;
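			// These two thresholds appear to control the split below: LayerForSprite()
			// presumably compares each sprite's projected screen coverage against
			// thresLow/thresRange, so small sprites stay in the full-resolution pass
			// while large ones are faded into the quarter-resolution, blurred pass.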

			// full-resolution sprites
			{
				GLProfiler::Context measure(renderer->GetGLProfiler(), "Full Resolution");
				for (size_t i = 0; i < sprites.size(); i++) {
					Sprite &spr = sprites[i];
					float layer = LayerForSprite(spr);
					if (layer == 1.f)
						continue;
					if (spr.image != lastImage) {
						Flush();
						lastImage = spr.image;
						SPAssert(vertices.empty());
					}

					Vertex v;
					v.x = spr.center.x;
					v.y = spr.center.y;
					v.z = spr.center.z;
					v.radius = spr.radius;
					v.angle = spr.angle;
					v.r = spr.color.x;
					v.g = spr.color.y;
					v.b = spr.color.z;
					v.a = spr.color.w;

					float fade = 1.f - layer;
					v.r *= fade;
					v.g *= fade;
					v.b *= fade;
					v.a *= fade;

					uint32_t idx = (uint32_t)vertices.size();
					v.sx = -1;
					v.sy = -1;
					vertices.push_back(v);
					v.sx = 1;
					v.sy = -1;
					vertices.push_back(v);
					v.sx = -1;
					v.sy = 1;
					vertices.push_back(v);
					v.sx = 1;
					v.sy = 1;
					vertices.push_back(v);

					indices.push_back(idx);
					indices.push_back(idx + 1);
					indices.push_back(idx + 2);
					indices.push_back(idx + 1);
					indices.push_back(idx + 3);
					indices.push_back(idx + 2);
				}

				Flush();
			}

			// low-res sprites
			IGLDevice::UInteger lastFb = device->GetInteger(IGLDevice::FramebufferBinding);
			int sW = device->ScreenWidth(), sH = device->ScreenHeight();
			int lW = (sW + 3) / 4, lH = (sH + 3) / 4;
			int numLowResSprites = 0;
			GLColorBuffer buf = renderer->GetFramebufferManager()->CreateBufferHandle(lW, lH, true);
			device->BindFramebuffer(IGLDevice::Framebuffer, buf.GetFramebuffer());
			device->ClearColor(0.f, 0.f, 0.f, 0.f);
			device->Clear(IGLDevice::ColorBufferBit);
			device->BlendFunc(IGLDevice::One, IGLDevice::OneMinusSrcAlpha);
			device->Viewport(0, 0, lW, lH);
			{
				GLProfiler::Context measure(renderer->GetGLProfiler(), "Low Resolution");
				for (size_t i = 0; i < sprites.size(); i++) {
					Sprite &spr = sprites[i];
					float layer = LayerForSprite(spr);
					if (layer == 0.f)
						continue;
					if (spr.image != lastImage) {
						Flush();
						lastImage = spr.image;
						SPAssert(vertices.empty());
					}

					numLowResSprites++;

					Vertex v;
					v.x = spr.center.x;
					v.y = spr.center.y;
					v.z = spr.center.z;
					v.radius = spr.radius;
					v.angle = spr.angle;
					v.r = spr.color.x;
					v.g = spr.color.y;
					v.b = spr.color.z;
					v.a = spr.color.w;

					float fade = layer;
					v.r *= fade;
					v.g *= fade;
					v.b *= fade;
					v.a *= fade;

					uint32_t idx = (uint32_t)vertices.size();
					v.sx = -1;
					v.sy = -1;
					vertices.push_back(v);
					v.sx = 1;
					v.sy = -1;
					vertices.push_back(v);
					v.sx = -1;
					v.sy = 1;
					vertices.push_back(v);
					v.sx = 1;
					v.sy = 1;
					vertices.push_back(v);

					indices.push_back(idx);
					indices.push_back(idx + 1);
					indices.push_back(idx + 2);
					indices.push_back(idx + 1);
					indices.push_back(idx + 3);
					indices.push_back(idx + 2);
				}
				Flush();
			}

			// finalize

			device->ActiveTexture(1);
			device->BindTexture(IGLDevice::Texture2D, 0);
			device->ActiveTexture(0);
			device->BindTexture(IGLDevice::Texture2D, 0);
			device->EnableVertexAttribArray(positionAttribute(), false);
			device->EnableVertexAttribArray(spritePosAttribute(), false);
			device->EnableVertexAttribArray(colorAttribute(), false);

			// composite downsampled sprite
			device->BlendFunc(IGLDevice::One, IGLDevice::OneMinusSrcAlpha);
			if (numLowResSprites > 0) {
				GLProfiler::Context measure(renderer->GetGLProfiler(), "Finalize");
				GLQuadRenderer qr(device);

				// do gaussian blur
				GLProgram *program =
				  renderer->RegisterProgram("Shaders/PostFilters/Gauss1D.program");
				static GLProgramAttribute blur_positionAttribute("positionAttribute");
				static GLProgramUniform blur_textureUniform("mainTexture");
				static GLProgramUniform blur_unitShift("unitShift");
				program->Use();
				blur_positionAttribute(program);
				blur_textureUniform(program);
				blur_unitShift(program);
				blur_textureUniform.SetValue(0);
				device->ActiveTexture(0);
				qr.SetCoordAttributeIndex(blur_positionAttribute());
				device->Enable(IGLDevice::Blend, false);

				// x-direction
				GLColorBuffer buf2 =
				  renderer->GetFramebufferManager()->CreateBufferHandle(lW, lH, true);
				device->BindTexture(IGLDevice::Texture2D, buf.GetTexture());
				device->BindFramebuffer(IGLDevice::Framebuffer, buf2.GetFramebuffer());
				blur_unitShift.SetValue(1.f / lW, 0.f);
				qr.Draw();
				buf.Release();

				// y-direction
				GLColorBuffer buf3 =
				  renderer->GetFramebufferManager()->CreateBufferHandle(lW, lH, true);
				device->BindTexture(IGLDevice::Texture2D, buf2.GetTexture());
				device->BindFramebuffer(IGLDevice::Framebuffer, buf3.GetFramebuffer());
				blur_unitShift.SetValue(0.f, 1.f / lH);
				qr.Draw();
				buf2.Release();

				buf = buf3;

				device->Enable(IGLDevice::Blend, true);

				// composite
				program = renderer->RegisterProgram("Shaders/PostFilters/PassThrough.program");
				static GLProgramAttribute positionAttribute("positionAttribute");
				static GLProgramUniform colorUniform("colorUniform");
				static GLProgramUniform textureUniform("mainTexture");
				static GLProgramUniform texCoordRange("texCoordRange");

				positionAttribute(program);
				textureUniform(program);
				texCoordRange(program);
				colorUniform(program);

				program->Use();

				textureUniform.SetValue(0);
				texCoordRange.SetValue(0.f, 0.f, 1.f, 1.f);
				colorUniform.SetValue(1.f, 1.f, 1.f, 1.f);

				qr.SetCoordAttributeIndex(positionAttribute());
				device->BindFramebuffer(IGLDevice::Framebuffer, lastFb);
				device->BindTexture(IGLDevice::Texture2D, buf.GetTexture());
				device->Viewport(0, 0, sW, sH);
				qr.Draw();
				device->BindTexture(IGLDevice::Texture2D, 0);

			} else {
				device->Viewport(0, 0, sW, sH);

				device->BindFramebuffer(IGLDevice::Framebuffer, lastFb);
			}

			buf.Release();
		}
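The two qr.Draw() passes above implement a separable Gaussian blur: one horizontal 1D pass followed by one vertical 1D pass, which is equivalent to a full 2D Gaussian kernel at a fraction of the texture fetches. A minimal CPU-side sketch of the same idea, independent of the GL code above (hypothetical buffer layout, 3-tap kernel):

#include <vector>

// Blur a w x h single-channel image with a 3-tap kernel, first along x, then along y.
static void separableBlur3(std::vector<float>& img, int w, int h)
{
    const float k[3] = {0.25f, 0.5f, 0.25f}; // simple binomial approximation of a Gaussian
    std::vector<float> tmp(img.size());

    auto clampi = [](int v, int lo, int hi) { return v < lo ? lo : (v > hi ? hi : v); };

    // horizontal pass: img -> tmp
    for (int y = 0; y < h; y++)
        for (int x = 0; x < w; x++) {
            float s = 0.f;
            for (int t = -1; t <= 1; t++)
                s += k[t + 1] * img[y * w + clampi(x + t, 0, w - 1)];
            tmp[y * w + x] = s;
        }

    // vertical pass: tmp -> img
    for (int y = 0; y < h; y++)
        for (int x = 0; x < w; x++) {
            float s = 0.f;
            for (int t = -1; t <= 1; t++)
                s += k[t + 1] * tmp[clampi(y + t, 0, h - 1) * w + x];
            img[y * w + x] = s;
        }
}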
hkpRigidBody* DestructibleBridgeUtil::createAirplane(const AirplaneData& data)
{

	//
	// create all necessary shapes that make up our airplane
	//
	hkArray<hkpShape*> shapes;
	{
		// create airplane's body
		{
			hkpBoxShape* bodyShape = createBoxShape(AIRPLANE_BODY_WIDTH, AIRPLANE_BODY_HEIGHT, AIRPLANE_BODY_LENGTH, data.m_wingSpan);
			createConvexTranslateShapeAndAddToArray(bodyShape, 0.0f, 0.0f, 0.0f, data.m_wingSpan, shapes);
			bodyShape->removeReference();
		}

		// create airplane's left and right wing
		{
			hkpBoxShape* wingShape = createBoxShape(AIRPLANE_WING_LENGTH, AIRPLANE_WING_HEIGHT, AIRPLANE_WING_WIDTH, data.m_wingSpan);
			createConvexTranslateShapeAndAddToArray(wingShape, -AIRPLANE_WING_LENGTH * 0.5f, (AIRPLANE_BODY_HEIGHT+AIRPLANE_WING_HEIGHT) * 0.5f, AIRPLANE_BODY_LENGTH / 6.0f, data.m_wingSpan, shapes);
			createConvexTranslateShapeAndAddToArray(wingShape,  AIRPLANE_WING_LENGTH * 0.5f, (AIRPLANE_BODY_HEIGHT+AIRPLANE_WING_HEIGHT) * 0.5f, AIRPLANE_BODY_LENGTH / 6.0f, data.m_wingSpan, shapes);
			wingShape->removeReference();
		}

		// create airplane's tail wing
		{
			hkpBoxShape* tailWingShape = createBoxShape(AIRPLANE_TAIL_WIDTH, AIRPLANE_TAIL_HEIGHT, AIRPLANE_TAIL_LENGTH, data.m_wingSpan);
			createConvexTranslateShapeAndAddToArray(tailWingShape, 0.0f, (AIRPLANE_BODY_HEIGHT+AIRPLANE_WING_HEIGHT) * 0.5f, -AIRPLANE_BODY_LENGTH * 0.5f, data.m_wingSpan, shapes);
			tailWingShape->removeReference();
		}

		// create airplane's rudder
		{
			hkpBoxShape* rudderShape = createBoxShape(AIRPLANE_RUDDER_WIDTH, AIRPLANE_RUDDER_HEIGHT, AIRPLANE_RUDDER_LENGTH, data.m_wingSpan);
			createConvexTranslateShapeAndAddToArray(rudderShape, 0.0f, (AIRPLANE_BODY_HEIGHT+AIRPLANE_RUDDER_HEIGHT) * 0.5f, -AIRPLANE_BODY_LENGTH * 0.5f, data.m_wingSpan, shapes);
			rudderShape->removeReference();
		}

		// create the bombs
		{
			hkpBoxShape* bombShape = createBoxShape(AIRPLANE_BOMB_WIDTH, AIRPLANE_BOMB_HEIGHT, AIRPLANE_BOMB_LENGTH, data.m_wingSpan);

			{
				hkReal distance = (AIRPLANE_WING_LENGTH*2.0f) / AIRPLANE_NUM_BOMBS;
				hkReal posX = AIRPLANE_WING_LENGTH-AIRPLANE_BOMB_WIDTH;
				{
					for (int i=0; i<AIRPLANE_NUM_BOMBS; i++)
					{
						createConvexTranslateShapeAndAddToArray(bombShape, posX, (AIRPLANE_BODY_HEIGHT-AIRPLANE_BOMB_HEIGHT) * 0.5f, AIRPLANE_BODY_LENGTH / 6.0f, data.m_wingSpan, shapes);
						posX -= distance;
					}
				}
			}

			bombShape->removeReference();
		}
	}

	//
	// create the complete shape for the airplane
	//
	hkpShape* airplaneShape;
	{
#if defined(USE_MOPP_FOR_AIRPLANE)
		hkpListShape* listShape = new hkpListShape(shapes.begin(), shapes.getSize());

		hkpMoppCompilerInput mci;
		mci.m_enableChunkSubdivision = true;

		hkpMoppCode* code = hkpMoppUtility::buildCode( listShape , mci);
		airplaneShape = new hkpMoppBvTreeShape(listShape, code);
		listShape->removeReference();
#elif defined(USE_CONVEX_LIST_SHAPE_FOR_AIRPLANE)
		airplaneShape = new hkpConvexListShape( (const hkpConvexShape*const*)shapes.begin(), shapes.getSize());
#else // if defined(USE_LIST_SHAPE_FOR_AIRPLANE)
		airplaneShape = new hkpListShape(shapes.begin(), shapes.getSize());
#endif

		{
			for (int i=0; i<shapes.getSize(); i++)
			{
				shapes[i]->removeReference();
			}
		}
	}

	//
	// create airplane
	//
	hkVector4 airplaneDirection;
	hkpRigidBody* airplane;
	{
		hkpRigidBodyCinfo planeInfo;
		{
			planeInfo.m_shape			= airplaneShape;
			planeInfo.m_position		= data.m_position;
			planeInfo.m_motionType		= hkpMotion::MOTION_DYNAMIC;
			planeInfo.m_friction		= 0.5f;
			planeInfo.m_qualityType		= HK_COLLIDABLE_QUALITY_MOVING;
			planeInfo.m_angularDamping	= 0.7f;
			hkpInertiaTensorComputer::setShapeVolumeMassProperties(airplaneShape, data.m_mass, planeInfo);
		}

		airplaneDirection = data.m_destination;
		airplaneDirection.sub4(data.m_position);
		airplaneDirection.normalize3();

		hkRotation rotation;
		{
			hkVector4 upVector(0.0f, 1.0f, 0.0f);
			hkVector4Util::buildOrthonormal(airplaneDirection, upVector, rotation);
			rotation.getColumn(2).setNeg4(rotation.getColumn(2));
			hkAlgorithm::swap16(rotation.getColumn(0), rotation.getColumn(2));
		}
		planeInfo.m_rotation.setAndNormalize(rotation);

		airplane = new hkpRigidBody(planeInfo);

		airplaneShape->removeReference();
	}

	//
	// push airplane
	//
	{
		hkVector4 velocityVec;
		velocityVec.setMul4( data.m_velocity, airplaneDirection );
		airplane->setLinearVelocity( velocityVec );
	}

	return airplane;
}
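createBoxShape() and createConvexTranslateShapeAndAddToArray() are defined elsewhere in the demo; a plausible reconstruction is sketched below, assuming each half-extent and offset is simply scaled by the overall size factor (the scaling and the argument names are assumptions, and the sketch relies on the same Havok SDK headers as the function above):

// Hypothetical reconstructions of the helpers used by createAirplane().
static hkpBoxShape* createBoxShape(hkReal width, hkReal height, hkReal length, hkReal scale)
{
	// Assumed: the passed dimensions are full sizes, so halve them for the box half-extents.
	hkVector4 halfExtents(width * 0.5f * scale, height * 0.5f * scale, length * 0.5f * scale);
	return new hkpBoxShape(halfExtents);
}

static void createConvexTranslateShapeAndAddToArray(hkpConvexShape* childShape,
	hkReal x, hkReal y, hkReal z, hkReal scale, hkArray<hkpShape*>& shapesOut)
{
	// Offset the shared child shape and collect it for the compound (list/MOPP) shape.
	hkVector4 offset(x * scale, y * scale, z * scale);
	shapesOut.pushBack(new hkpConvexTranslateShape(childShape, offset));
}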
//////////////////////////////////////////////////////////////////////////
// Matrix setup
//
// World, View, Projection
//////////////////////////////////////////////////////////////////////////
VOID SetupMatrices()
{
	// World
	D3DXMATRIXA16 worldMatrix;
	
	// Take the time modulo 1000 to keep the float computation precise
	UINT  time = timeGetTime() % 1000;
	
	// Angle for one full revolution (2 * pi) per second
	FLOAT angle = time * ( 2.0f * D3DX_PI ) / 1000.0f;
	
	// Build a rotation matrix about the Y axis
	D3DXMatrixRotationY( &worldMatrix, angle );
	
	// Set the rotation matrix on the device as the World transform
	g_pd3dDevice->SetTransform( D3DTS_WORLD, &worldMatrix );
	

	//////////////////////////////////////////////////////////////////////////
	// Three values are needed to define the View matrix:
	// the eye point, the look-at point, and the up vector.
	//////////////////////////////////////////////////////////////////////////
	
	// 1. Eye position              ( 0, 3.0, -5 )
	D3DXVECTOR3 eyePoint( 0.0f, 3.0f, -5.0f );
	
	// 2. Point the eye looks at    ( 0, 0, 0 )
	D3DXVECTOR3 lookAtPoint( 0.0f, 0.0f, 0.0f );
	
	// 3. Up vector                 ( 0, 1, 0 )
	D3DXVECTOR3 upVector( 0.0f, 1.0f, 0.0f );
	
	D3DXMATRIXA16 viewMatrix;
	// Build the View matrix from values 1, 2, and 3
	D3DXMatrixLookAtLH( &viewMatrix, &eyePoint, &lookAtPoint, &upVector );
	
	// Set the View matrix on the device
	g_pd3dDevice->SetTransform( D3DTS_VIEW, &viewMatrix );
	
	// The Projection matrix is defined by the field of view (FOV), the aspect ratio, and the near/far clipping planes.
	D3DXMATRIXA16 projMatrix;

	/// projMatrix : matrix that receives the result
	/// D3DX_PI/4  : FOV (D3DX_PI/4 = 45 degrees)
	/// 1.0f       : aspect ratio
	/// 1.0f       : near clipping plane
	/// 100.0f     : far clipping plane
	D3DXMatrixPerspectiveFovLH(

		// Matrix that receives the result
		&projMatrix,

		// FOV (D3DX_PI/4 = 45 degrees)
		D3DX_PI / 4,
		
		// Aspect ratio 1:1
		1.0f,
		
		// Near clipping plane
		1.0f,
		
		// Far clipping plane
		100.0f
		
		);

	// Set the Projection matrix on the device
	g_pd3dDevice->SetTransform( D3DTS_PROJECTION, &projMatrix );
}
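The aspect ratio above is hardcoded to 1:1; for a non-square backbuffer it is usually derived from the backbuffer dimensions instead. A small sketch, assuming the width and height saved from the D3DPRESENT_PARAMETERS used at device creation (the helper and its parameters are hypothetical):

// Hypothetical helper: builds the projection matrix from the actual backbuffer size.
VOID SetupProjection( UINT backBufferWidth, UINT backBufferHeight )
{
	D3DXMATRIXA16 projMatrix;

	// Same 45-degree FOV and clip planes as above, but with the real aspect ratio.
	FLOAT aspect = (FLOAT)backBufferWidth / (FLOAT)backBufferHeight;
	D3DXMatrixPerspectiveFovLH( &projMatrix, D3DX_PI / 4, aspect, 1.0f, 100.0f );

	g_pd3dDevice->SetTransform( D3DTS_PROJECTION, &projMatrix );
}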