//----------------------------------------------------------------------------
void EffectModel::GenMesh ()
{
	StandardMesh stdMesh(GetVertexFormat(), false);

	TriMeshPtr mesh;

	if (MT_SPHERE == mModelType)
	{
		mesh = stdMesh.Sphere(mZSample, mRadiusSample, GetEmitSizeX());
	}
	else if (MT_CYLINDEROPEN == mModelType)
	{
		mesh = stdMesh.Cylinder(mZSample, mRadiusSample, GetEmitSizeX(), GetEmitSizeZ(), true);
	}
	else if (MT_MODEL == mModelType)
	{
		if (!mModelFilename.empty())
			mesh = DynamicCast<TriMesh>(PX2_RM.BlockLoadCopy(mModelFilename));
	}

	if (mesh)
	{
		SetVertexBuffer(mesh->GetVertexBuffer());
		SetIndexBuffer(mesh->GetIndexBuffer());

		// Cache the initial texture coordinates of the generated mesh.
		mInitUVs.clear();
		VertexBufferAccessor vba(GetVertexFormat(), GetVertexBuffer());
		for (int i=0; i<vba.GetNumVertices(); i++)
		{
			Float2 uv = vba.TCoord<Float2>(0, i);
			mInitUVs.push_back(uv);
		}
	}
}
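The cached mInitUVs are evidently the reference values for later UV animation. A minimal sketch of such an update, assuming a hypothetical Float2 member mUVOffset and the same VertexBufferAccessor access used above; the buffer re-upload call is an assumption about the engine:

//----------------------------------------------------------------------------
// Hypothetical sketch, not part of the original source: offset every texture
// coordinate from its cached initial value.
void EffectModel::UpdateUVs ()
{
	VertexBufferAccessor vba(GetVertexFormat(), GetVertexBuffer());
	for (int i = 0; i < vba.GetNumVertices(); i++)
	{
		Float2 uv = mInitUVs[i];
		vba.TCoord<Float2>(0, i) = Float2(uv[0] + mUVOffset[0],
			uv[1] + mUVOffset[1]);
	}

	// Re-upload the modified vertex buffer (assumed WM5-style API).
	Renderer::UpdateAll(GetVertexBuffer());
}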
//----------------------------------------------------------------------------
bool ConvexHull3D::OnMouseClick (int button, int state, int x, int y,
    unsigned int modifiers)
{
    WindowApplication3::OnMouseClick(button, state, x, y, modifiers);

    if (button == MOUSE_RIGHT_BUTTON)
    {
        // Convert to right-handed screen coordinates.
        y = GetHeight() - 1 - y;

        APoint origin;
        AVector direction;
        mRenderer->GetPickRay(x, y, origin, direction);
        mPicker.Execute(mTrnNode, origin, direction, 0.0f, Mathf::MAX_REAL);
        if (mPicker.Records.size() > 0)
        {
            const PickRecord& record = mPicker.GetClosestNonnegative();
            TriMeshPtr mesh = StaticCast<TriMesh>(record.Intersected);
            float maxBary = record.Bary[0];
            int index = 0;
            if (record.Bary[1] > maxBary)
            {
                maxBary = record.Bary[1];
                index = 1;
            }
            if (record.Bary[2] > maxBary)
            {
                maxBary = record.Bary[2];
                index = 2;
            }
            int* indices = (int*)mesh->GetIndexBuffer()->GetData();
            sprintf(mFooter, "intr = %d, tri = %d, ver = %d",
                (int)mPicker.Records.size(), record.Triangle,
                indices[3*record.Triangle + index]);
        }
    }

    return true;
}
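The three comparisons above simply select the largest of the three barycentric coordinates, i.e. the triangle corner closest to the pick point. A possible refactoring sketch (LargestBaryIndex is a hypothetical helper, not part of the sample):

//----------------------------------------------------------------------------
// Hypothetical helper: return the index (0, 1, or 2) of the triangle vertex
// nearest the pick point, i.e. the largest barycentric coordinate.
static int LargestBaryIndex (const PickRecord& record)
{
    int index = 0;
    for (int j = 1; j < 3; ++j)
    {
        if (record.Bary[j] > record.Bary[index])
        {
            index = j;
        }
    }
    return index;
}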
//----------------------------------------------------------------------------
void OpenGLRenderer::Draw (const PlanarReflection& rkPReflection)
{
    TriMeshPtr spkPlane = rkPReflection.GetPlane();
    NodePtr spkCaster = rkPReflection.GetCaster();

    if ( !m_bCapPlanarReflection )
    {
        // The effect is not supported.  Draw normally without the mirror.
        // The OnDraw calls are necessary to handle culling and camera plane
        // state.
        spkPlane->OnDraw(*this);
        spkCaster->OnDraw(*this);
        return;
    }

    if ( m_bDrawingReflected )
    {
        // Some other object is currently doing a planar reflection.  Do not
        // allow the recursion and just draw normally.
        Renderer::Draw(spkCaster);
        SetState(spkPlane->GetRenderStateArray());
        Draw(*spkPlane);
        return;
    }

    // TO DO:  Support for multiple mirrors could be added here by iterating
    // over the section delimited by START PER-MIRROR and END PER-MIRROR.
    // None of the OpenGL code needs to change, just the mirror-plane data.

    // START PER-MIRROR

    // enable depth buffering
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LESS);
    glDepthMask(GL_TRUE);

    // Step 1 setup and render.
    // Render the mirror into the stencil plane (but no color).  All visible
    // mirror pixels will have the stencil value of the mirror.
    // Make sure that no pixels are written to the depth buffer or color
    // buffer, but use depth buffer testing so that the stencil will not
    // be written where the plane is behind something already in the
    // depth buffer.
    glEnable(GL_STENCIL_TEST);
    glStencilFunc(GL_ALWAYS,rkPReflection.GetStencilValue(),~0);
    glStencilOp(GL_KEEP,GL_KEEP,GL_REPLACE);
    glStencilMask(~0);
    glColorMask(GL_FALSE,GL_FALSE,GL_FALSE,GL_FALSE);
    glDepthMask(GL_FALSE);
    Draw(*spkPlane);


    // Step 2 setup and render.
    // Render the mirror plane again, processing only pixels where the
    // stencil buffer contains the reference value.  This time there are no
    // changes to the stencil buffer, and the depth value is reset to the
    // far view clipping plane (by setting the range of depth values in the
    // viewport volume to [1,1]).  Since the mirror plane cannot also be
    // semitransparent, we do not care what is behind the mirror plane in
    // the depth buffer.  We need to move the depth buffer values back to
    // where the mirror plane will be rendered so that the reflected caster
    // can be depth buffered correctly (rendering the reflected caster
    // writes depth values that appear to be behind the mirror plane).
    // Enable writes to the color buffer.  Later, when we render the
    // reflecting plane and blend it with the background (which should
    // contain the reflected caster), we use the same blending function so
    // that pixels where the reflected caster was not rendered still
    // contain the reflecting plane; in that case the blend result makes
    // the reflecting plane appear opaque, even though it was blended with
    // coefficients that sum to one.
    SetState(spkPlane->GetRenderStateArray());
    glDepthRange(1.0,1.0);
    glDepthFunc(GL_ALWAYS);
    glStencilFunc(GL_EQUAL,rkPReflection.GetStencilValue(),~0);
    glStencilOp(GL_KEEP,GL_KEEP,GL_KEEP);
    glStencilMask(~0);
    glColorMask(GL_TRUE,GL_TRUE,GL_TRUE,GL_TRUE);
    glDepthMask(GL_TRUE);
    Draw(*spkPlane);


    // Step 2 cleanup.
    // Restore the depth range and depth testing function.
    glDepthFunc(GL_LESS);
    glDepthRange(0.0,1.0);


    // Step 3 setup.
    // We are about to render the reflected caster.  For that, we
    // will need to compute the reflection viewing matrix.
    Vector3f kCurrNormal = spkPlane->WorldRotate()*
        rkPReflection.GetPlaneNormal();
    Vector3f kCurrPoint = spkPlane->WorldTranslate()+spkPlane->WorldScale()*
        (spkPlane->WorldRotate()*rkPReflection.GetPointOnPlane());
    // Plane equation a*x + b*y + c*z + d = 0, with the positive half-space
    // on the side of the plane normal.
    GLdouble adPlaneEq[4] = { kCurrNormal.X(), kCurrNormal.Y(),
        kCurrNormal.Z(), -kCurrNormal.Dot(kCurrPoint) };
    GLfloat aafReflectionMatrix[4][4];
    ComputeReflectionMatrix(aafReflectionMatrix,adPlaneEq);

    // Save the modelview transform, then append the reflection matrix so
    // that the caster is drawn reflected through the mirror plane.
    glPushMatrix();
    glMultMatrixf(&aafReflectionMatrix[0][0]);

    // Setup a clip plane so that only objects above the mirror plane
    // get reflected.
    glClipPlane(GL_CLIP_PLANE0,adPlaneEq);
    glEnable(GL_CLIP_PLANE0);


    // Reverse the cull direction.  Allow for models that are not necessarily
    // set up with front or back face culling.
    m_bReverseCullState = true;

    // We do not support mirrors reflecting mirrors.  They just appear as the
    // base color in a reflection.
    m_bDrawingReflected = true;


    // Step 3 render.
    // Render the reflected caster.  Only render where the stencil buffer
    // contains the reference value.  Enable depth testing.  This time
    // allow writes to the color buffer.
    glStencilFunc(GL_EQUAL,rkPReflection.GetStencilValue(),~0);
    glStencilOp(GL_KEEP,GL_KEEP,GL_KEEP);
    glColorMask(GL_TRUE,GL_TRUE,GL_TRUE,GL_TRUE);
    Renderer::Draw(spkCaster);


    // Step 3 cleanup.
    // Restore state.
    m_bDrawingReflected = false;
    m_bReverseCullState = false;
    glDisable(GL_CLIP_PLANE0);
    glPopMatrix();


    // Step 4 setup.
    // We are about to render the reflecting plane again.  Reset to the
    // render state for the reflecting plane.  We want to blend the
    // reflecting plane with what is already in the color buffer where the
    // plane will be rendered, namely either the image of the reflected
    // caster or the reflecting plane itself.  All we change about the
    // rendering of the reflecting plane at this stage is to force the
    // alpha channel to always be the reflectance value for the reflecting
    // plane.  Render the reflecting plane wherever the stencil buffer is
    // set to the reference value, and this time clear the stencil
    // reference value where it is set.  Perform the normal depth buffer
    // testing and writes.  Allow the color buffer to be written, but blend
    // the reflecting plane with the values already in the color buffer
    // based on the reflectance value.  Note that where the stencil buffer
    // is set, the color buffer contains color values from either the
    // reflecting plane or the reflected caster.  Blending uses
    // src = 1-alpha (reflecting plane) and dest = alpha (background:
    // reflecting plane or reflected caster).
    SetState(spkPlane->GetRenderStateArray());
    glEnable(GL_BLEND);
    glBlendColorEXT(0.0f,0.0f,0.0f,rkPReflection.GetReflectance());
    glBlendFunc(GL_ONE_MINUS_CONSTANT_ALPHA_EXT,GL_CONSTANT_ALPHA_EXT);
    glStencilFunc(GL_EQUAL,rkPReflection.GetStencilValue(),~0);
    glStencilOp(GL_KEEP,GL_KEEP,GL_INVERT);
    glColorMask(GL_TRUE,GL_TRUE,GL_TRUE,GL_TRUE);
    Draw(*spkPlane);


    // Step 4 cleanup.
    glDisable(GL_BLEND);
    glDisable(GL_STENCIL_TEST);

    // END PER-MIRROR

    // Render the objects as usual.
    Renderer::Draw(spkCaster);
}
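ComputeReflectionMatrix itself is not shown in this excerpt. For the plane a*x + b*y + c*z + d = 0 with unit-length normal (a,b,c), the reflection is I - 2*n*n^T with translation -2*d*n. A minimal sketch in OpenGL's column-major layout, which may differ in detail from the real helper:

//----------------------------------------------------------------------------
// Sketch only: build the 4x4 matrix that reflects points through the plane
// a*x + b*y + c*z + d = 0, assuming (a,b,c) has unit length.  Column-major
// storage, as expected by glMultMatrixf.
static void ComputeReflectionMatrixSketch (GLfloat aafM[4][4],
    const GLdouble adPlane[4])
{
    GLfloat fA = (GLfloat)adPlane[0], fB = (GLfloat)adPlane[1];
    GLfloat fC = (GLfloat)adPlane[2], fD = (GLfloat)adPlane[3];

    // Upper-left 3x3 block: I - 2*n*n^T (symmetric, so row-major versus
    // column-major does not matter for this block).
    aafM[0][0] = 1.0f - 2.0f*fA*fA;
    aafM[0][1] = -2.0f*fA*fB;
    aafM[0][2] = -2.0f*fA*fC;
    aafM[0][3] = 0.0f;
    aafM[1][0] = -2.0f*fA*fB;
    aafM[1][1] = 1.0f - 2.0f*fB*fB;
    aafM[1][2] = -2.0f*fB*fC;
    aafM[1][3] = 0.0f;
    aafM[2][0] = -2.0f*fA*fC;
    aafM[2][1] = -2.0f*fB*fC;
    aafM[2][2] = 1.0f - 2.0f*fC*fC;
    aafM[2][3] = 0.0f;

    // Last column: translation -2*d*n and the homogeneous 1.
    aafM[3][0] = -2.0f*fD*fA;
    aafM[3][1] = -2.0f*fD*fB;
    aafM[3][2] = -2.0f*fD*fC;
    aafM[3][3] = 1.0f;
}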
//----------------------------------------------------------------------------
void ClodMeshes::CreateScene ()
{
    mScene = new0 Node();
    mTrnNode = new0 Node();
    mScene->AttachChild(mTrnNode);
    mWireState = new0 WireState();
    mRenderer->SetOverrideWireState(mWireState);

    // Load the face model.
#ifdef WM5_LITTLE_ENDIAN
    std::string path = Environment::GetPathR("FacePN.wmof");
#else
    std::string path = Environment::GetPathR("FacePN.be.wmof");
#endif
    InStream inStream;
    inStream.Load(path);
    TriMeshPtr mesh = StaticCast<TriMesh>(inStream.GetObjectAt(0));
    VertexBufferAccessor vba0(mesh);

    // Remove the normals and add texture coordinates.
    VertexFormat* vformat = VertexFormat::Create(2,
        VertexFormat::AU_POSITION, VertexFormat::AT_FLOAT3, 0,
        VertexFormat::AU_TEXCOORD, VertexFormat::AT_FLOAT2, 0);
    int vstride = vformat->GetStride();

    VertexBuffer* vbuffer = new0 VertexBuffer(vba0.GetNumVertices(), vstride);
    VertexBufferAccessor vba1(vformat, vbuffer);

    float xmin = Mathf::MAX_REAL, xmax = -Mathf::MAX_REAL;
    float ymin = Mathf::MAX_REAL, ymax = -Mathf::MAX_REAL;
    int i;
    for (i = 0; i < vba0.GetNumVertices(); ++i)
    {
        Float3 position = vba0.Position<Float3>(i);
        vba1.Position<Float3>(i) = position;

        float x = position[0];
        float y = position[2];  // project onto the model-space xz-plane
        vba1.TCoord<Float2>(0, i) = Float2(x, y);

        if (x < xmin)
        {
            xmin = x;
        }
        if (x > xmax)
        {
            xmax = x;
        }
        if (y < ymin)
        {
            ymin = y;
        }
        if (y > ymax)
        {
            ymax = y;
        }
    }

    float xmult = 1.0f/(xmax - xmin);
    float ymult = 1.0f/(ymax - ymin);
    for (i = 0; i < vba1.GetNumVertices(); ++i)
    {
        Float2 tcoord = vba1.TCoord<Float2>(0, i);
        vba1.TCoord<Float2>(0,i) = Float2(
            (tcoord[0] - xmin)*xmult,
            (tcoord[1] - ymin)*ymult);
    }

    mesh->SetVertexFormat(vformat);
    mesh->SetVertexBuffer(vbuffer);

    // Create a texture for the face.  Use the generated texture coordinates.
    Texture2DEffect* effect = new0 Texture2DEffect(Shader::SF_LINEAR);
    path = Environment::GetPathR("Magician.wmtf");
    Texture2D* texture = Texture2D::LoadWMTF(path);

#ifdef USE_CLOD_MESH
    // Create the collapse records to be shared by two CLOD meshes.
    int numRecords = 0;
    CollapseRecord* records = 0;
    CreateClodMesh ccm(mesh, numRecords, records);
    CollapseRecordArray* recordArray = new0 CollapseRecordArray(numRecords,
        records);

    mClod[0] = new0 ClodMesh(mesh, recordArray);
    mClod[0]->LocalTransform = mesh->LocalTransform;
    mClod[0]->LocalTransform.SetTranslate(mesh->LocalTransform.GetTranslate()
        - 150.0f*AVector::UNIT_X);
    mClod[0]->SetEffectInstance(effect->CreateInstance(texture));
    mTrnNode->AttachChild(mClod[0]);

    mClod[1] = new0 ClodMesh(mesh, recordArray);
    mClod[1]->LocalTransform = mesh->LocalTransform;
    mClod[1]->LocalTransform.SetTranslate(mesh->LocalTransform.GetTranslate()
        + 150.0f*AVector::UNIT_X - 100.0f*AVector::UNIT_Y);
    mClod[1]->SetEffectInstance(effect->CreateInstance(texture));
    mTrnNode->AttachChild(mClod[1]);

    mActive = mClod[0];
#else
    IndexBuffer* ibuffer = mesh->GetIndexBuffer();
    TriMesh* face = new0 TriMesh(vformat, vbuffer, ibuffer);
    face->LocalTransform = mesh->LocalTransform;
    face->LocalTransform.SetTranslate(mesh->LocalTransform.GetTranslate() -
        150.0f*AVector::UNIT_X);
    face->SetEffectInstance(effect->CreateInstance(texture));
    mTrnNode->AttachChild(face);

    face = new0 TriMesh(vformat, vbuffer, ibuffer);
    face->LocalTransform = mesh->LocalTransform;
    face->LocalTransform.SetTranslate(mesh->LocalTransform.GetTranslate() +
        150.0f*AVector::UNIT_X);
    face->SetEffectInstance(effect->CreateInstance(texture));
    mTrnNode->AttachChild(face);
#endif
}
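The (min,max)-to-[0,1] planar remapping above reappears in RenderToTexture::CreateScene below. A factored sketch of that step (GenerateProjectedUVs is a hypothetical helper, not part of either sample):

//----------------------------------------------------------------------------
// Hypothetical helper: copy positions from src to dst and generate texture
// coordinates by projecting onto the model-space components c0 and c1,
// remapped to [0,1]^2.  Assumes dst has a FLOAT2 TEXCOORD in unit 0.
static void GenerateProjectedUVs (VertexBufferAccessor& src,
    VertexBufferAccessor& dst, int c0, int c1)
{
    float umin = Mathf::MAX_REAL, umax = -Mathf::MAX_REAL;
    float vmin = Mathf::MAX_REAL, vmax = -Mathf::MAX_REAL;
    int i;
    for (i = 0; i < src.GetNumVertices(); ++i)
    {
        Float3 position = src.Position<Float3>(i);
        float u = position[c0], v = position[c1];
        if (u < umin) { umin = u; }
        if (u > umax) { umax = u; }
        if (v < vmin) { vmin = v; }
        if (v > vmax) { vmax = v; }
    }

    float uMult = 1.0f/(umax - umin);
    float vMult = 1.0f/(vmax - vmin);
    for (i = 0; i < dst.GetNumVertices(); ++i)
    {
        Float3 position = src.Position<Float3>(i);
        dst.Position<Float3>(i) = position;
        dst.TCoord<Float2>(0, i) = Float2(
            (position[c0] - umin)*uMult,
            (position[c1] - vmin)*vMult);
    }
}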
//----------------------------------------------------------------------------
void RenderToTexture::CreateScene ()
{
	// Create the root of the scene.
	mScene = new0 Node();
	mTrnNode = new0 Node();
	mScene->AttachChild(mTrnNode);
	mWireState = new0 WireState();
	mRenderer->SetOverrideWireState(mWireState);

	// Create a screen-space camera to use with the render target.
	mScreenCamera = ScreenTarget::CreateCamera();

	// Create a screen polygon to use with the render target.
	VertexFormat* vformat = VertexFormat::Create(2,
	                        VertexFormat::AU_POSITION, VertexFormat::AT_FLOAT3, 0,
	                        VertexFormat::AU_TEXCOORD, VertexFormat::AT_FLOAT2, 0);

	const int rtWidth = 256, rtHeight = 256;
	mScreenPolygon = ScreenTarget::CreateRectangle(vformat, rtWidth, rtHeight,
	                 0.0f, 0.2f, 0.0f, 0.2f, 0.0f);

	// Create the render target.
	//Texture::Format tformat = Texture::TF_A8B8G8R8;  // DX9 fails
	Texture::Format tformat = Texture::TF_A8R8G8B8;
	//Texture::Format tformat = Texture::TF_A16B16G16R16;
	//Texture::Format tformat = Texture::TF_A16B16G16R16F;
	//Texture::Format tformat = Texture::TF_A32B32G32R32F;
	mRenderTarget = new0 RenderTarget(1, tformat, rtWidth, rtHeight, false,
	                                  false);

	// Attach the render target texture to the screen polygon mesh.
	mScreenPolygon->SetEffectInstance(Texture2DEffect::CreateUniqueInstance(
	                                      mRenderTarget->GetColorTexture(0), Shader::SF_LINEAR,
	                                      Shader::SC_CLAMP_EDGE, Shader::SC_CLAMP_EDGE));

	// Load the face model and use multitexturing.
#ifdef WM5_LITTLE_ENDIAN
	std::string path = Environment::GetPathR("FacePN.wmof");
#else
	std::string path = Environment::GetPathR("FacePN.be.wmof");
#endif
	InStream inStream;
	inStream.Load(path);
	TriMeshPtr mesh = DynamicCast<TriMesh>(inStream.GetObjectAt(0));

	// Create texture coordinates for the face.  Based on knowledge of the
	// mesh, the (x,z) values of the model-space vertices may be mapped to
	// (s,t) in [0,1]^2.
	VertexBufferAccessor vba0(mesh);
	const int numVertices = vba0.GetNumVertices();
	float xmin = Mathf::MAX_REAL, xmax = -Mathf::MAX_REAL;
	float zmin = Mathf::MAX_REAL, zmax = -Mathf::MAX_REAL;
	int i;
	for (i = 0; i < numVertices; ++i)
	{
		Float3 position = vba0.Position<Float3>(i);
		float x = position[0];
		if (x < xmin)
		{
			xmin = x;
		}
		if (x > xmax)
		{
			xmax = x;
		}

		float z = position[2];
		if (z < zmin)
		{
			zmin = z;
		}
		if (z > zmax)
		{
			zmax = z;
		}
	}
	float invXRange = 1.0f/(xmax - xmin);
	float invZRange = 1.0f/(zmax - zmin);

	// Strip out the normal vectors, because there is no lighting in this
	// sample.  Add in two texture coordinate channels for a multiplicative
	// texture effect.
	vformat = VertexFormat::Create(3,
	                               VertexFormat::AU_POSITION, VertexFormat::AT_FLOAT3, 0,
	                               VertexFormat::AU_TEXCOORD, VertexFormat::AT_FLOAT2, 0,
	                               VertexFormat::AU_TEXCOORD, VertexFormat::AT_FLOAT2, 1);
	int vstride = vformat->GetStride();

	VertexBuffer* vbuffer = new0 VertexBuffer(numVertices, vstride);
	VertexBufferAccessor vba1(vformat, vbuffer);
	for (i = 0; i < numVertices; ++i)
	{
		Float3 position = vba0.Position<Float3>(i);
		Float2 tcoord(
		    (position[0] - xmin)*invXRange,
		    (position[2] - zmin)*invZRange);

		vba1.Position<Float3>(i) = position;
		vba1.TCoord<Float2>(0, i) = tcoord;
		vba1.TCoord<Float2>(1, i) = tcoord;
	}
	mesh->SetVertexFormat(vformat);
	mesh->SetVertexBuffer(vbuffer);

	path = Environment::GetPathR("Leaf.wmtf");
	Texture2D* texture0 = Texture2D::LoadWMTF(path);
	path = Environment::GetPathR("Water.wmtf");
	Texture2D* texture1 = Texture2D::LoadWMTF(path);
	VisualEffectInstance* instance = Texture2AddEffect::CreateUniqueInstance(
	                                     texture0, Shader::SF_LINEAR, Shader::SC_CLAMP_EDGE,
	                                     Shader::SC_CLAMP_EDGE, texture1, Shader::SF_LINEAR,
	                                     Shader::SC_CLAMP_EDGE, Shader::SC_CLAMP_EDGE);

	mesh->SetEffectInstance(instance);

	mTrnNode->AttachChild(mesh);
}
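CreateScene only builds the render-target resources; the per-frame use of mRenderTarget is not part of this excerpt. A rough sketch of how the draw pass might look, assuming the usual WM5 Renderer Enable/Disable render-target calls and the standard mCuller/mCamera application members (details may differ from the sample's actual OnIdle):

//----------------------------------------------------------------------------
// Sketch only: render the visible set into the texture, then draw the scene
// and the screen polygon that displays that texture.
void RenderToTexture::DrawFrameSketch ()
{
	// Pass 1: draw the scene into the render target.
	mRenderer->Enable(mRenderTarget);
	mRenderer->ClearBuffers();
	mRenderer->Draw(mCuller.GetVisibleSet());
	mRenderer->Disable(mRenderTarget);

	// Pass 2: draw the scene to the back buffer, then overlay the screen
	// polygon using the screen-space camera.
	mRenderer->ClearBuffers();
	mRenderer->Draw(mCuller.GetVisibleSet());
	mRenderer->SetCamera(mScreenCamera);
	mRenderer->Draw(mScreenPolygon);
	mRenderer->SetCamera(mCamera);
	mRenderer->DisplayColorBuffer();
}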
// Load a TSM file, tessellate its T-spline faces, and build a GLC_World.
GLC_World GLWidget::loadTSMFile( const QString &filename )
{
	RhBuilderPtr reader = makePtr<RhBuilder>(filename.toStdString());
	TSplinePtr spline = reader->findTSpline();

	GLC_World w;

	IndexList face;
	QList<float> vertex;
	QList<float> normal;

	TTessellator tessellator(spline);

	TImagePtr image = spline->getTImage();

	// Go through all the faces in the TImage and create objects.
	TFacVIterator fiter = image->faceIteratorBegin();
	for (; fiter != image->faceIteratorEnd(); ++fiter)
	{
		TFacePtr tface = *fiter;

		TriMeshPtr trimesh = tessellator.interpolateFace(tface);

		P3dVIterator piter = trimesh->pointIteratorBegin();
		for (; piter != trimesh->pointIteratorEnd(); ++piter)
		{
			vertex.push_back((*piter)->x());
			vertex.push_back((*piter)->y());
			vertex.push_back((*piter)->z());
		}

		N3dVIterator niter = trimesh->normalIteratorBegin();
		for (; niter != trimesh->normalIteratorEnd(); ++niter)
		{
			normal.push_back((*niter)->i());
			normal.push_back((*niter)->j());
			normal.push_back((*niter)->k());
		}

		TriVIterator titer = trimesh->triangleIteratorBegin();
		for (; titer != trimesh->triangleIteratorEnd(); ++titer)
		{
			face.push_back((*titer)->point_indices[0]);
			face.push_back((*titer)->point_indices[1]);
			face.push_back((*titer)->point_indices[2]);
		}

		GLC_Mesh* glc_mesh = new GLC_Mesh();
		glc_mesh->addTriangles(0,face);
		face.clear();
		glc_mesh->addVertice(vertex.toVector());
		vertex.clear();
		glc_mesh->addNormals(normal.toVector());
		normal.clear();
		glc_mesh->finish();

		GLC_3DRep *rep = new GLC_3DRep(glc_mesh);
		glc_mesh = NULL;
		
		// Set the material.
		GLC_Material* pCurrentMat = rep->geomAt(0)->firstMaterial();
		pCurrentMat->setAmbientColor(Qt::gray);
		pCurrentMat->setDiffuseColor(Qt::gray);

		// Add objects (faces) to the world collection 
		w.collection()->add(*rep);
	}
	return w;
}
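A minimal usage sketch, assuming GLWidget derives from QGLWidget and keeps the loaded world in a member; openFile and m_World are hypothetical names:

// Hypothetical caller: replace the current scene with the contents of a TSM
// file and request a repaint.
void GLWidget::openFile( const QString &filename )
{
	m_World = loadTSMFile(filename);  // assumed GLC_World member
	updateGL();                       // QGLWidget repaint
}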