Beispiel #1
0
// Casts a picking ray from a screen-space mouse position (x, y) into the
// scene and returns the intersection point of that ray with `plane`.
// `plane` is interpreted as (normal.xyz, d) for the equation dot(n, p) + d = 0.
// NOTE(review): the screen size is hard-coded to 1280x720 below — confirm it
// matches the actual viewport, or pass width/height in.
vec3 Camera::pickAgainstPlane(float x, float y, vec4 plane)
{
    // Normalise pixel coordinates to [0, 1].
    float nxPos = x / 1280.0f; //replace these with your screen width and height
    float nyPos = y / 720.0f;

    // Re-centre around the middle of the screen: [-0.5, 0.5].
    float sxPos = nxPos - 0.5f;
    float syPos = nyPos - 0.5f;

    // Scale to NDC [-1, 1]; y is negated because screen y grows downwards.
    float fxPos = sxPos * 2;
    float fyPos = syPos * -2;

    mat4 inv_viewproj = glm::inverse(view_proj); //view_proj is the member variable

    // Unproject a point on the far NDC plane (z = 1) back to world space.
    vec4 mouse_pos(fxPos, fyPos, 1, 1);
    vec4 world_pos = inv_viewproj * mouse_pos;

    // Homogeneous (perspective) divide.
    world_pos /= world_pos.w;

    // Ray origin is the camera position (translation column of the camera's
    // world transform); direction goes from the camera through the unprojected point.
    vec3 cam_pos = world[3].xyz(); //world is the member variable
    vec3 dir = world_pos.xyz() - cam_pos;

    // Ray/plane intersection: solve dot(cam_pos + t*dir, n) + d = 0 for t.
    // NOTE(review): divides by zero when the ray is parallel to the plane.
    float t = -(glm::dot(cam_pos, plane.xyz()) + plane.w)
        / (glm::dot(dir, plane.xyz()));

    vec3 result = cam_pos + dir * t;

    return result;
}
// Rotates this vector by the quaternion v (stored in a vec4) and returns the
// rotated vector, using the standard sandwich product r = q * p * q^-1.
// BUG FIX: the original returned vec3(f.x, f.y, f.w) — the z component of the
// rotated vector is f.z; f.w is the (irrelevant) scalar part of the result.
vec3 vec3::rotate(const vec4&v) const
{
	// For a unit quaternion the inverse equals the conjugate; normalizing
	// guards against a non-unit input.
	vec4 i = v.conjugate();
	i.normalize();
	// q * p ...
	vec4 t = v.multiply(*this);
	// ... * q^-1
	vec4 f = t.multiply(i);
	return vec3(f.x, f.y, f.z);
}
Beispiel #3
0
// Computes the 4x4 outer product of a and b using SSE intrinsics:
// each row i of the result is the broadcast scalar a[i] multiplied by b.
mat4 outer_product( const vec4& a, const vec4& b )
{
	mat4 result;
	m128_t* const rows = result.m128();
	const m128_t rhs = b.m128();
	for ( int i = 0; i < 4; ++i )
		rows[i] = SLMATH_MUL_PS( SLMATH_LOAD_PS1(&a[i]), rhs );
	return result;
}
Beispiel #4
0
// Multiplies vector v by this matrix: each component of the result is the
// dot product of v with the corresponding matrix column.
vec4 sm4::operator * (vec4 v) {
  const vec4 c0 = getColumn(0);
  const vec4 c1 = getColumn(1);
  const vec4 c2 = getColumn(2);
  const vec4 c3 = getColumn(3);

  vec4 out;
  out.x = v.dot(c0);
  out.y = v.dot(c1);
  out.z = v.dot(c2);
  out.w = v.dot(c3);
  return out;
}
Beispiel #5
0
		// One-time scene setup: texture subsystem, mesh, anisotropic texture,
		// font, Cg vertex/fragment programs, initial light direction and eye
		// position, and fixed-function render state.
		virtual void user_init()
		{	
			tex::initialise();							
			// Build the demo mesh (radius/height/centre/segments/colour).
			msh.fill(5.0f, 2.5f, vec3::ZERO_VEC3, 90, color4::WHITE4);
			
			// Reset light rotation angle and mesh rotation angle.
			Lightangx = ang = 0.0f;
		
			fr = 0;
			fpsstr = "Wait for results...";
									
			// Load and configure the anisotropy lookup texture.
			aniso.load("aniso.tga");
			texture_settings sett = { RWWM_MIRRORED_REPEAT, RWWM_CLAMP, RWWM_CLAMP,
									  RWMAGFT_LINEAR, RWMINFT_LINEAR, RWTIF_RGBA, RWTOF_RGBA };
			aniso.generate(sett);	
			
			FONTMANAGER.add("fps", 12, "cour.ttf");			
			CGPROGRAMMANAGER.open_program("anisoV.cg", "basicV", RWCP_VERTEX);		
			CGPROGRAMMANAGER.open_program("anisoF.cg", "basicF", RWCP_FRAGMENT);					
			
			// Light starts on the +x axis; LightDir is its normalized direction.
			pos = vec3(8.0f, 0.0f, 0.0f);
			
			LightDir.x = pos.x;
			LightDir.y = pos.y;
			LightDir.z = pos.z;
			LightDir.w = 1.0f;
			LightDir.normalize();
			
			eye = vec3(0.0f, 5.0f, 30.0f);
			
			// Lighting comes from the Cg shaders, not the fixed pipeline.
			RENDERER.disable(RWS_LIGHTING);									
			RENDERER.enable(RWS_TEXTURE_2D);
			RENDERER.enable(RWS_DEPTH_TEST);			
		}	
Beispiel #6
0
// Sets up the ground renderer: compiles the shadow shaders with a constant
// material colour, uploads a unit-quad vertex buffer, and precomputes the
// light matrix used to project shadow-map coordinates.
// Returns false if the shaders fail to load.
bool
GroundRenderer::setup(const mat4& projection, unsigned int texture)
{
    projection_ = projection;
    texture_ = texture;

    // Program set up
    static const vec4 materialDiffuse(0.3f, 0.3f, 0.3f, 1.0f);
    static const string vtx_shader_filename(GLMARK_DATA_PATH"/shaders/shadow.vert");
    static const string frg_shader_filename(GLMARK_DATA_PATH"/shaders/shadow.frag");
    ShaderSource vtx_source(vtx_shader_filename);
    ShaderSource frg_source(frg_shader_filename);

    // Bake the material colour into the vertex shader as a constant.
    vtx_source.add_const("MaterialDiffuse", materialDiffuse);

    if (!Scene::load_shaders_from_strings(program_, vtx_source.str(), frg_source.str())) {
        return false;
    }
    positionLocation_ = program_["position"].location();

    // Set up the position data for our "quad" (a triangle strip covering [-1,1]^2).
    vertices_.push_back(vec2(-1.0, -1.0));
    vertices_.push_back(vec2(1.0, -1.0));
    vertices_.push_back(vec2(-1.0, 1.0));
    vertices_.push_back(vec2(1.0, 1.0));

    // Set up the VBO and stash our position data in it.
    glGenBuffers(1, &bufferObject_);
    glBindBuffer(GL_ARRAY_BUFFER, bufferObject_);
    glBufferData(GL_ARRAY_BUFFER, vertices_.size() * sizeof(vec2),
                 &vertices_.front(), GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    // Set up the light matrix with a bias that will convert values
    // in the range of [-1, 1] to [0, 1)], then add in the projection
    // and the "look at" matrix from the light position.
    light_ *= LibMatrix::Mat4::translate(0.5, 0.5, 0.5);
    light_ *= LibMatrix::Mat4::scale(0.5, 0.5, 0.5);
    light_ *= projection_;
    light_ *= LibMatrix::Mat4::lookAt(lightPosition.x(), lightPosition.y(), lightPosition.z(),
                                      0.0, 0.0, 0.0,
                                      0.0, 1.0, 0.0);

    return true;
}
Beispiel #7
0
// Extracts the endpoints of light source 1 from two marker objects
// ("light1/from" and "light1/to"): refreshes model matrices, copies each
// marker's translation into `from`/`to` (with w forced to 0), then removes
// the marker objects from the collection. Outputs are zeroed when the
// markers are missing.
void SceneObject::getLightSource1(SceneObjects& sceneObjectColl, vec4& from, vec4& to)
{
	from.set(0);
	to.set(0);

	// let them update model matrix, we'll take translation as mesh centre
	getRoot(sceneObjectColl).update(0, mat4(), mat4());

	// these are mark meshes, not for display, but for light source
	SceneObjects::iterator _from = sceneObjectColl.find("light1/from"), _to = sceneObjectColl.find("light1/to"), light1 = sceneObjectColl.find("light1");
	if(sceneObjectColl.end() == _from || sceneObjectColl.end() == _to || sceneObjectColl.end() == light1)
		return;

	// m_model[12..14] holds the translation of a column-major 4x4 matrix.
	mcemaths_quatcpy(from, &_from->second.m_model[12]);
	mcemaths_quatcpy(to, &_to->second.m_model[12]);
	from.w = to.w = 0;

	// The markers have served their purpose — detach and erase them.
	light1->second.m_children.clear();
	sceneObjectColl.erase(_from);
	sceneObjectColl.erase(_to);
}
Beispiel #8
0
// Uploads a vec4 uniform straight to GL without consulting any cache.
// Does nothing when the location is invalid (-1) or in console builds.
void Program::setUniformDirectly(int nLoc, uint32_t type, const vec4& value)
{
#if !defined(ET_CONSOLE_APPLICATION)
	if (nLoc != -1)
	{
		(void)type;
		ET_ASSERT(type == GL_FLOAT_VEC4);
		ET_ASSERT(apiHandleValid());

		glUniform4fv(nLoc, 1, value.data());
		checkOpenGLError("glUniform4fv");
	}
#endif
}
// Transform v by the upper-left 3x3 part of this matrix (rotation/scale
// only — no translation); the w component passes through unchanged.
void 
mat4::transform3 (vec4 &v) const
{
  const float *m = this->m_Matrix;
  const float x = v.x;
  const float y = v.y;
  const float z = v.z;

  vec4 out;
  out.x = x * m[0] + y * m[4] + z * m[8];
  out.y = x * m[1] + y * m[5] + z * m[9];
  out.z = x * m[2] + y * m[6] + z * m[10];
  out.w = v.w;

  v.copy(out);
}
Beispiel #10
0
// Uploads a vec4 uniform, skipping the GL call when the cached value for
// this location already matches the requested one (unless forced).
void Program::setUniform(int nLoc, uint32_t type, const vec4& value, bool forced)
{
	if (nLoc == -1) return;

	(void)type;
	assert(type == GL_FLOAT_VEC4);
	assert(loaded());

	// Up to date only if we have a cached entry and it equals the new value.
	const bool upToDate = (_vec4Cache.count(nLoc) != 0) && (_vec4Cache[nLoc] == value);
	if (forced || !upToDate)
	{
		_vec4Cache[nLoc] = value;
		glUniform4fv(nLoc, 1, value.data());
	}

	checkOpenGLError("setUniform - vec4");
}
// Transform v by this full 4x4 matrix, then apply the homogeneous
// (perspective) divide so the result comes back with w == 1.
void 
mat4::transform (vec4 &v) const
{
  const float *m = this->m_Matrix;
  const float x = v.x;
  const float y = v.y;
  const float z = v.z;
  const float w = v.w;

  vec4 out;
  out.x = x * m[0] + y * m[4] + z * m[8]  + w * m[12];
  out.y = x * m[1] + y * m[5] + z * m[9]  + w * m[13];
  out.z = x * m[2] + y * m[6] + z * m[10] + w * m[14];
  out.w = x * m[3] + y * m[7] + z * m[11] + w * m[15];

  // Perspective divide. NOTE(review): divides by zero when out.w == 0.
  out *= (1 / out.w);

  v.copy(out);
}
Beispiel #12
0
// Uploads a vec4 uniform with value caching: the GL call (and its error
// check) only happens when forced or when the cached value differs.
// Does nothing when the location is invalid (-1) or in console builds.
void Program::setUniform(int nLoc, uint32_t type, const vec4& value, bool forced)
{
#if !defined(ET_CONSOLE_APPLICATION)
	if (nLoc == -1) return;

	(void)type;
	ET_ASSERT(type == GL_FLOAT_VEC4);
	ET_ASSERT(apiHandleValid());

	// Up to date only if a cached entry exists and equals the new value.
	const bool upToDate = (_vec4Cache.count(nLoc) != 0) && (_vec4Cache[nLoc] == value);
	if (forced || !upToDate)
	{
		_vec4Cache[nLoc] = value;
		glUniform4fv(nLoc, 1, value.data());
		checkOpenGLError("glUniform4fv");
	}
	
#endif
}
Beispiel #13
0
// Renders an axis-aligned 9-patch quad in the XY plane (z taken from
// bottom_left). patch_info holds the normalized UV bounds of the stretchable
// interior as (left, bottom, right, top); texture_size (in pixels) fixes the
// size of the non-stretching border patches. The quad is drawn as a 4x4
// vertex grid (16 vertices, 9 cells, 54 indices).
void Mesh::RenderAAQuadAlongXNinePatch(const vec3 &bottom_left,
                                       const vec3 &top_right,
                                       const vec2i &texture_size,
                                       const vec4 &patch_info) {
  static const Attribute format[] = {kPosition3f, kTexCoord2f, kEND};
  // Triangle indices for the 9 cells of the 4x4 grid, two triangles per cell.
  static const unsigned short indices[] = {
      0, 2, 1,  1,  2, 3,  2, 4,  3,  3,  4,  5,  4,  6,  5,  5,  6,  7,
      1, 3, 8,  8,  3, 9,  3, 5,  9,  9,  5,  10, 5,  7,  10, 10, 7,  11,
      8, 9, 12, 12, 9, 13, 9, 10, 13, 13, 10, 14, 10, 11, 14, 14, 11, 15,
  };
  // Normalize the corners so min/max hold the true extents.
  auto max = vec2::Max(bottom_left.xy(), top_right.xy());
  auto min = vec2::Min(bottom_left.xy(), top_right.xy());
  // Inner grid lines: p0 is the lower-left inner corner (border scaled by the
  // texture's pixel size), p1 the upper-right inner corner.
  auto p0 = vec2(texture_size) * patch_info.xy() + min;
  auto p1 = max - vec2(texture_size) * (mathfu::kOnes2f - patch_info.zw());

  // Check if the 9 patch edges are not overwrapping.
  // In that case, adjust 9 patch geometry locations not to overwrap.
  if (p0.x() > p1.x()) {
    p0.x() = p1.x() = (min.x() + max.x()) / 2;
  }
  if (p0.y() > p1.y()) {
    p0.y() = p1.y() = (min.y() + max.y()) / 2;
  }

  // vertex format is [x, y, z] [u, v]:
  float z = bottom_left.z();
  // clang-format off
  const float vertices[] = {
      min.x(), min.y(), z, 0.0f,           0.0f,
      p0.x(),  min.y(), z, patch_info.x(), 0.0f,
      min.x(), p0.y(),  z, 0.0f,           patch_info.y(),
      p0.x(),  p0.y(),  z, patch_info.x(), patch_info.y(),
      min.x(), p1.y(),  z, 0.0,            patch_info.w(),
      p0.x(),  p1.y(),  z, patch_info.x(), patch_info.w(),
      min.x(), max.y(), z, 0.0,            1.0,
      p0.x(),  max.y(), z, patch_info.x(), 1.0,
      p1.x(),  min.y(), z, patch_info.z(), 0.0f,
      p1.x(),  p0.y(),  z, patch_info.z(), patch_info.y(),
      p1.x(),  p1.y(),  z, patch_info.z(), patch_info.w(),
      p1.x(),  max.y(), z, patch_info.z(), 1.0f,
      max.x(), min.y(), z, 1.0f,           0.0f,
      max.x(), p0.y(),  z, 1.0f,           patch_info.y(),
      max.x(), p1.y(),  z, 1.0f,           patch_info.w(),
      max.x(), max.y(), z, 1.0f,           1.0f,
  };
  // clang-format on
  // 6 indices per cell * 9 cells; stride = 3 position + 2 UV floats.
  Mesh::RenderArray(kTriangles, 6 * 9, format, sizeof(float) * 5,
                    reinterpret_cast<const char *>(vertices), indices);
}
// vec4 convenience overload: dehomogenizes the normal and forwards to the
// vec3 version.
vec3 randomHemispherePoint(vec4 normal) {
	const auto n = normal.dehomogenize();
	return randomHemispherePoint(n);
}
// vec4 convenience overload: dehomogenizes the normal and forwards to the
// lower-dimensional version.
vec4 randomCirclePoint(vec4 normal) {
	const auto n = normal.dehomogenize();
	return randomCirclePoint(n);
}
// Convenience overload: unpacks the vec4 into its four scalar components
// and forwards to the scalar setFloat4.
void Graphics::setFloat4(ConstantLocation position, vec4 value) {
	const auto x = value.x();
	const auto y = value.y();
	const auto z = value.z();
	const auto w = value.w();
	setFloat4(position, x, y, z, w);
}
// vec4 convenience overload: dehomogenizes all four points and forwards to
// the vec3 barycentric-coordinate computation.
vec3 barycentric(vec4 v1, vec4 v2, vec4 v3, vec4 hitPoint) {
	const auto a = v1.dehomogenize();
	const auto b = v2.dehomogenize();
	const auto c = v3.dehomogenize();
	const auto p = hitPoint.dehomogenize();
	return barycentric(a, b, c, p);
}
Beispiel #18
0
// vec4 convenience overload: grows the bounds by the xyz part of a
// homogeneous point.
void Bounds::addPoint(const vec4& v) {
    const auto p = v.xyz();
    addPoint(p);
}
Beispiel #19
0
// Draws one frame in two passes: (1) render the model from the light's
// viewpoint into the depth render target, then (2) render the camera view,
// binding the depth and colour textures from pass 1 plus the refraction
// image map, and loading all matrix uniforms the shaders need.
void
RefractPrivate::draw()
{
    // To perform the depth pass, set up the model-view transformation so
    // that we're looking at the horse from the light position.  That will
    // give us the appropriate view for the shadow.
    modelview_.push();
    modelview_.loadIdentity();
    modelview_.lookAt(lightPosition.x(), lightPosition.y(), lightPosition.z(),
                      0.0, 0.0, 0.0,
                      0.0, 1.0, 0.0);
    modelview_.rotate(rotation_, 0.0f, 1.0f, 0.0f);
    if (orientModel_)
    {
        // Extra fixed rotation for models authored around a different axis.
        modelview_.rotate(orientationAngle_, orientationVec_.x(), orientationVec_.y(), orientationVec_.z());
    }
    mat4 mvp(projection_.getCurrent());
    mvp *= modelview_.getCurrent();
    modelview_.pop();

    // Enable the depth render target with our transformation and render.
    depthTarget_.enable(mvp);
    vector<GLint> attrib_locations;
    attrib_locations.push_back(depthTarget_.program()["position"].location());
    attrib_locations.push_back(depthTarget_.program()["normal"].location());
    mesh_.set_attrib_locations(attrib_locations);
    if (useVbo_) {
        mesh_.render_vbo();
    }
    else {
        mesh_.render_array();
    }
    depthTarget_.disable();

    // Draw the "normal" view of the horse
    modelview_.push();
    modelview_.translate(-centerVec_.x(), -centerVec_.y(), -(centerVec_.z() + 2.0 + radius_));
    modelview_.rotate(rotation_, 0.0f, 1.0f, 0.0f);
    if (orientModel_)
    {
        modelview_.rotate(orientationAngle_, orientationVec_.x(), orientationVec_.y(), orientationVec_.z());
    }
    mvp = projection_.getCurrent();
    mvp *= modelview_.getCurrent();

    // Bind the three textures the refraction shader samples.
    program_.start();
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, depthTarget_.depthTexture());
    program_["DistanceMap"] = 0;
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, depthTarget_.colorTexture());
    program_["NormalMap"] = 1;
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, texture_);
    program_["ImageMap"] = 2;
    // Load both the modelview*projection as well as the modelview matrix itself
    program_["ModelViewProjectionMatrix"] = mvp;
    program_["ModelViewMatrix"] = modelview_.getCurrent();
    // Load the NormalMatrix uniform in the shader. The NormalMatrix is the
    // inverse transpose of the model view matrix.
    mat4 normal_matrix(modelview_.getCurrent());
    normal_matrix.inverse().transpose();
    program_["NormalMatrix"] = normal_matrix;
    program_["LightMatrix"] = light_;
    attrib_locations.clear();
    attrib_locations.push_back(program_["position"].location());
    attrib_locations.push_back(program_["normal"].location());
    mesh_.set_attrib_locations(attrib_locations);
    if (useVbo_) {
        mesh_.render_vbo();
    }
    else {
        mesh_.render_array();
    }

    // Per-frame cleanup
    modelview_.pop();
}
	// Divides left by right via the member Divide; left is taken by value so
	// the member operation works on a copy.
	vec4 operator/(vec4 left, const vec4& right)
	{
		vec4 quotient = left.Divide(right);
		return quotient;
	}
// Constructs a ray with origin a, direction b (normalized on assignment),
// and i as the index of the last object interacted with.
Ray::Ray(vec4 a, vec4 b, int i) {
	lastIndex = i;
	pos = a;
	dir = b.normalize();
}
	// Subtracts right from left via the member Subtract; left is taken by
	// value so the member operation works on a copy.
	vec4 operator-(vec4 left, const vec4& right)
	{
		vec4 difference = left.Subtract(right);
		return difference;
	}
	// Multiplies left by right via the member Multiply; left is taken by
	// value so the member operation works on a copy.
	vec4 operator*(vec4 left, const vec4& right)
	{
		vec4 product = left.Multiply(right);
		return product;
	}
	// Adds right to left via the member Add; left is taken by value so the
	// member operation works on a copy.
	vec4 operator+(vec4 left, const vec4& right)
	{
		vec4 sum = left.Add(right);
		return sum;
	}
Beispiel #25
0
// Clips this mesh against `clipplane` (per-face clipping is delegated to
// FaceGeometry::clip), removes faces degenerated to fewer than 3 vertices,
// and — since clipping a closed convex polyhedron opens a hole — constructs
// a closing cap face from the face edges that lie on the clipping plane.
// `epsilon` is the distance tolerance for "on the plane" tests.
// Returns a mesh containing only the closing face (empty if none was built).
MeshGeometry MeshGeometry::clip(const vec4& clipplane, double epsilon) {
    // Clip all faces...
    for (iterator it = begin(); it != end(); ++it)
        it->clip(clipplane, epsilon);

    // Remove empty faces...
    for (size_t i = 0; i < faces_.size(); ++i) {
        // Is face empty? (i-- compensates for the erase shifting indices.)
        if (faces_.at(i).getVertexCount() < 3)
            faces_.erase(faces_.begin() + i--);
    }

    // Close convex polyhedron if necessary...
    typedef std::pair<VertexGeometry, VertexGeometry> EdgeType;
    typedef std::vector<EdgeType> EdgeListType;
    typedef std::vector<VertexGeometry> VertexListType;

    EdgeListType edgeList;
    FaceGeometry closingFace;

    // Search all face edges on the clipping plane...
    for (size_t i = 0; i < faces_.size(); ++i) {
        FaceGeometry face = faces_.at(i);

        VertexListType verticesOnClipplane;

        for (size_t j = 0; j < face.getVertexCount(); ++j) {
            if (face.getVertex(j).getDistanceToPlane(clipplane, epsilon) == 0)
                verticesOnClipplane.push_back(face.getVertex(j));

            // Is face in the same plane as the clipping plane?
            if (verticesOnClipplane.size() > 2)
                break;
        }

        // Does one face edge corresponds with clipping plane?
        if (verticesOnClipplane.size() == 2)
            edgeList.push_back(std::make_pair(verticesOnClipplane[0], verticesOnClipplane[1]));
    }

    // Is closing necessary?
    if (edgeList.size() > 1) {
        // Sort edges to produce contiguous vertex order...
        // (Selection-sort style pass: find the edge that continues from the
        // current edge's free endpoint and swap it into the next slot.)
        bool reverseLastEdge = false;
        for (size_t i = 0; i < edgeList.size() - 1; ++i) {
            for (size_t j = i + 1; j < edgeList.size(); ++j) {
                VertexGeometry connectionVertex;
                if (reverseLastEdge)
                    connectionVertex = edgeList.at(i).first;
                else
                    connectionVertex = edgeList.at(i).second;

                if (edgeList.at(j).first.equals(connectionVertex, epsilon)) {
                    std::swap(edgeList.at(i + 1), edgeList.at(j));
                    reverseLastEdge = false;
                    break;
                }
                else if (edgeList.at(j).second.equals(connectionVertex, epsilon)) {
                    std::swap(edgeList.at(i + 1), edgeList.at(j));
                    reverseLastEdge = true;
                    break;
                }
            }
        }

        // Convert sorted edge list to sorted vertex list...
        // (combine() merges coincident endpoints of adjacent edges.)
        VertexListType closingFaceVertices;
        for (size_t i = 0; i < edgeList.size(); ++i) {
            bool reverseEdge = i != 0 && !closingFaceVertices.at(closingFaceVertices.size() - 1).equals(edgeList.at(i).first);

            VertexGeometry first = (reverseEdge ? edgeList.at(i).second : edgeList.at(i).first);
            VertexGeometry second = (reverseEdge ? edgeList.at(i).first : edgeList.at(i).second);

            if (i == 0)
                closingFaceVertices.push_back(first);
            else
                closingFaceVertices.at(closingFaceVertices.size() - 1).combine(first);

            if (i < (edgeList.size() - 1))
                closingFaceVertices.push_back(second);
            else
                closingFaceVertices[0].combine(second);
        }

        // Convert vertex order to counter clockwise if necessary...
        // (Newell-style summed cross products give the face normal.)
        vec3 closingFaceNormal(0, 0, 0);
        for (size_t i = 0; i < closingFaceVertices.size(); ++i)
            closingFaceNormal += tgt::cross(closingFaceVertices[i].getCoords(), closingFaceVertices[(i + 1) % closingFaceVertices.size()].getCoords());
        closingFaceNormal = tgt::normalize(closingFaceNormal);

        if (tgt::dot(clipplane.xyz(), closingFaceNormal) < 0)
            std::reverse(closingFaceVertices.begin(), closingFaceVertices.end());

        // Close convex polyhedron...
        for (VertexListType::iterator it = closingFaceVertices.begin(); it != closingFaceVertices.end(); ++it) {
            // TODO(b_bolt01): Remove debug message...
            //std::cout << " cfv " << it->getCoords() << std::endl;
            closingFace.addVertex(*it);
        }
        addFace(closingFace);
    }

    // If there is only the clipplane left, erase it also...
    if (faces_.size() == 1)
        faces_.clear();

    MeshGeometry closingMesh;
    if (closingFace.getVertexCount() > 0)
        closingMesh.addFace(closingFace);
    return closingMesh;
}
Beispiel #26
0
		// Per-frame render: handles keyboard input (F/V rotate the mesh,
		// Z/X orbit the light, D/C raise/lower it, Q/W toggle wireframe),
		// then draws the light marker and the mesh through the Cg
		// anisotropic-shading programs.
		virtual void draw()
		{			
			glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
			RENDERER.identity_matrix();			
			
			gluLookAt(eye.x, eye.y, eye.z, 
					  0.0f, 0.0f, 0.0f,
					  0.0f, 1.0f, 0.0f);

			if (key_state('F')) ang += 0.2f;			
			if (key_state('V'))	ang -= 0.2f;

			// Orbit the light around the y axis, keeping an 8-unit radius.
			if (key_state('Z')) 
			{
				Lightangx += 0.1f;
				pos.x = 8.0f * sinf(Lightangx);
				pos.z = 8.0f * cosf(Lightangx);
				
				LightDir.x = pos.x;
				LightDir.y = pos.y;
				LightDir.z = pos.z;
				LightDir.w = 1.0f;
				LightDir.normalize();
			}
			if (key_state('X')) 
			{
				Lightangx -= 0.1f;			
				pos.x = 8.0f * sinf(Lightangx);
				pos.z = 8.0f * cosf(Lightangx);
				LightDir.x = pos.x;
				LightDir.y = pos.y;
				LightDir.z = pos.z;
				LightDir.w = 1.0f;
				LightDir.normalize();
			}				

			// Raise/lower the light and refresh the normalized direction.
			if (key_state('D'))		
			{
				pos.y += 0.2f;			
				LightDir.x = pos.x;
				LightDir.y = pos.y;
				LightDir.z = pos.z;
				LightDir.w = 1.0f;
				LightDir.normalize();
			}				
			if (key_state('C'))		
			{
				pos.y -= 0.2f;	
				LightDir.x = pos.x;
				LightDir.y = pos.y;
				LightDir.z = pos.z;
				LightDir.w = 1.0f;
				LightDir.normalize();
			}
			
			if (key_state('Q'))		RENDERER.polygon_draw_mode(RWPS_FRONT_AND_BACK, RWPDM_LINES);
			if (key_state('W'))		RENDERER.polygon_draw_mode(RWPS_FRONT_AND_BACK, RWPDM_FILL);
					  			
			FONTMANAGER["fps"].print(-400.0f, 330.0f, fpsstr, color4::WHITE4);						  			
			
			// Bind the Cg vertex/fragment programs and feed their uniforms.
			CGPROGRAMMANAGER["basicV"].enable_profile();			
			CGPROGRAMMANAGER["basicV"].bind_program();				
																												
			CGPROGRAMMANAGER["basicF"].enable_profile();			
			CGPROGRAMMANAGER["basicF"].bind_program();							
												
			CGPROGRAMMANAGER["basicV"].set_parameter("EyePos", eye);												
			CGPROGRAMMANAGER["basicV"].set_parameter("LightVec", LightDir);	
								
			CGPROGRAMMANAGER["basicF"].set_texture("tex0", aniso.ID());		
			
			// Draw the light position as a single point (texturing off).
			RENDERER.disable(RWS_TEXTURE_2D);
			glPointSize(4.0f);
			glBegin(GL_POINTS);
				glVertex3f(pos.x, pos.y, pos.z);								
			glEnd();
			RENDERER.enable(RWS_TEXTURE_2D);

			RENDERER.bind_texture(RWTT_TEXTURE_2D, aniso.ID());
			glPushMatrix();	
				glRotatef(ang, 1.0f, 0.0f, 0.0f);
				
				// Upload the matrices the vertex program needs, then draw.
				CGPROGRAMMANAGER["basicV"].set_renderer_matrix("WorldViewProj");	
				CGPROGRAMMANAGER["basicV"].set_world_inverted_transposed_matrix("WorldIT");
				CGPROGRAMMANAGER["basicV"].set_world_transposed_matrix("World");
																		
				msh.draw(CGPROGRAMMANAGER["basicV"]);				
			glPopMatrix();
					
			CGPROGRAMMANAGER["basicV"].disable_profile();			
			CGPROGRAMMANAGER["basicF"].disable_profile();			
		}
Beispiel #27
0
// Compares two vectors by magnitude: yields 1.0 when this vector's length
// is less than or equal to v's, otherwise 0.0.
// NOTE(review): a bool return type would express the intent better.
double vec4::operator<=(const vec4& v)
{
	const double lhs = this->length();
	const double rhs = v.length();
	return lhs <= rhs;
}
Beispiel #28
0
// One-time scene setup: builds the refraction shader program (baking in
// light colour/position and the refractive index option), loads the chosen
// texture and model, converts the model to a mesh, fits a projection matrix
// to the model's bounding sphere, precomputes the light matrix, and creates
// the depth render target. Returns false on any load/setup failure.
bool
RefractPrivate::setup(map<string, Scene::Option>& options)
{
    // Program object setup
    static const string vtx_shader_filename(GLMARK_DATA_PATH"/shaders/light-refract.vert");
    static const string frg_shader_filename(GLMARK_DATA_PATH"/shaders/light-refract.frag");
    static const vec4 lightColor(0.4, 0.4, 0.4, 1.0);

    ShaderSource vtx_source(vtx_shader_filename);
    ShaderSource frg_source(frg_shader_filename);

    // Bake scene constants into the fragment shader source.
    frg_source.add_const("LightColor", lightColor);
    frg_source.add_const("LightSourcePosition", lightPosition);
    float refractive_index(Util::fromString<float>(options["index"].value));
    frg_source.add_const("RefractiveIndex", refractive_index);

    if (!Scene::load_shaders_from_strings(program_, vtx_source.str(), frg_source.str())) {
        return false;
    }

    const string& whichTexture(options["texture"].value);
    if (!Texture::load(whichTexture, &texture_, GL_LINEAR, GL_LINEAR, 0))
        return false;

    // Model setup
    Model model;
    const string& whichModel(options["model"].value);
    bool modelLoaded = model.load(whichModel);

    if(!modelLoaded)
        return false;

    // Now that we're successfully loaded, there are a few quirks about
    // some of the known models that we need to account for.  The draw
    // logic for the scene wants to rotate the model around the Y axis.
    // Most of our models are described this way.  Some need adjustment
    // (an additional rotation that gets the model into the correct
    // orientation).
    //
    // Here's a summary:
    //
    // Angel rotates around the Y axis
    // Armadillo rotates around the Y axis
    // Buddha rotates around the X axis
    // Bunny rotates around the Y axis
    // Dragon rotates around the X axis
    // Horse rotates around the Y axis
    if (whichModel == "buddha" || whichModel == "dragon")
    {
        orientModel_ = true;
        orientationAngle_ = -90.0;
        orientationVec_ = vec3(1.0, 0.0, 0.0);
    }
    else if (whichModel == "armadillo")
    {
        orientModel_ = true;
        orientationAngle_ = 180.0; 
        orientationVec_ = vec3(0.0, 1.0, 0.0);
    }

    if (model.needNormals())
        model.calculate_normals();

    // Mesh setup: interleaved or separate position (3f) + normal (3f) attributes.
    vector<std::pair<Model::AttribType, int> > attribs;
    attribs.push_back(std::pair<Model::AttribType, int>(Model::AttribTypePosition, 3));
    attribs.push_back(std::pair<Model::AttribType, int>(Model::AttribTypeNormal, 3));
    model.convert_to_mesh(mesh_, attribs);

    useVbo_ = (options["use-vbo"].value == "true");
    bool interleave = (options["interleave"].value == "true");
    mesh_.vbo_update_method(Mesh::VBOUpdateMethodMap);
    mesh_.interleave(interleave);

    if (useVbo_) {
        mesh_.build_vbo();
    }
    else {
        mesh_.build_array();
    }

    // Calculate a projection matrix that is a good fit for the model:
    // a field of view just wide enough for the model's bounding sphere.
    vec3 maxVec = model.maxVec();
    vec3 minVec = model.minVec();
    vec3 diffVec = maxVec - minVec;
    centerVec_ = maxVec + minVec;
    centerVec_ /= 2.0;
    float diameter = diffVec.length();
    radius_ = diameter / 2;
    float fovy = 2.0 * atanf(radius_ / (2.0 + radius_));
    fovy /= M_PI;
    fovy *= 180.0;
    float aspect(static_cast<float>(canvas_.width())/static_cast<float>(canvas_.height()));
    projection_.perspective(fovy, aspect, 2.0, 2.0 + diameter);

    // Set up the light matrix with a bias that will convert values
    // in the range of [-1, 1] to [0, 1)], then add in the projection
    // and the "look at" matrix from the light position.
    light_ *= LibMatrix::Mat4::translate(0.5, 0.5, 0.5);
    light_ *= LibMatrix::Mat4::scale(0.5, 0.5, 0.5);
    light_ *= projection_.getCurrent();
    light_ *= LibMatrix::Mat4::lookAt(lightPosition.x(), lightPosition.y(), lightPosition.z(),
                                      0.0, 0.0, 0.0,
                                      0.0, 1.0, 0.0);

    if (!depthTarget_.setup(canvas_.width(), canvas_.height())) {
        Log::error("Failed to set up the render target for the depth pass\n");
        return false;
    }

    return true;
}
Beispiel #29
0
// Compares two vectors by magnitude: yields 1.0 when this vector is strictly
// longer than v, otherwise 0.0.
// NOTE(review): a bool return type would express the intent better.
double vec4::operator>(const vec4& v)
{
	const double lhs = this->length();
	const double rhs = v.length();
	return lhs > rhs;
}
Beispiel #30
0
// Draws one frame in two passes: (1) render the model from the light's
// viewpoint into the depth render target (the shadow map), (2) draw the
// ground using that map, then render the camera view of the model.
void
ShadowPrivate::draw()
{
    // To perform the depth pass, set up the model-view transformation so
    // that we're looking at the horse from the light position.  That will
    // give us the appropriate view for the shadow.
    modelview_.push();
    modelview_.loadIdentity();
    modelview_.lookAt(lightPosition.x(), lightPosition.y(), lightPosition.z(),
                      0.0, 0.0, 0.0,
                      0.0, 1.0, 0.0);
    modelview_.rotate(rotation_, 0.0f, 1.0f, 0.0f);
    mat4 mvp(projection_.getCurrent());
    mvp *= modelview_.getCurrent();
    modelview_.pop();

    // Enable the depth render target with our transformation and render.
    depthTarget_.enable(mvp);
    vector<GLint> attrib_locations;
    attrib_locations.push_back(depthTarget_.program()["position"].location());
    attrib_locations.push_back(depthTarget_.program()["normal"].location());
    mesh_.set_attrib_locations(attrib_locations);
    if (useVbo_) {
        mesh_.render_vbo();
    }
    else {
        mesh_.render_array();
    }
    depthTarget_.disable();

    // Ground rendering using the above generated texture...
    ground_.draw();

    // Draw the "normal" view of the horse
    modelview_.push();
    modelview_.translate(-centerVec_.x(), -centerVec_.y(), -(centerVec_.z() + 2.0 + radius_));
    modelview_.rotate(rotation_, 0.0f, 1.0f, 0.0f);
    mvp = projection_.getCurrent();
    mvp *= modelview_.getCurrent();

    program_.start();
    program_["ModelViewProjectionMatrix"] = mvp;

    // Load the NormalMatrix uniform in the shader. The NormalMatrix is the
    // inverse transpose of the model view matrix.
    LibMatrix::mat4 normal_matrix(modelview_.getCurrent());
    normal_matrix.inverse().transpose();
    program_["NormalMatrix"] = normal_matrix;
    attrib_locations.clear();
    attrib_locations.push_back(program_["position"].location());
    attrib_locations.push_back(program_["normal"].location());
    mesh_.set_attrib_locations(attrib_locations);
    if (useVbo_) {
        mesh_.render_vbo();
    }
    else {
        mesh_.render_array();
    }

    // Per-frame cleanup
    modelview_.pop();
}