Example no. 1
/*!****************************************************************************
 @Function		DrawSkybox
 @Description	Draws the skybox onto the screen.
******************************************************************************/
void OGLES2Glass::DrawSkybox()
{
	glUseProgram(m_SkyboxProgram.uiId);

	PVRTMat4 mVP = m_mProjection * m_mView;
	PVRTMat4 mInvVP = mVP.inverseEx();

	glUniformMatrix4fv(m_SkyboxProgram.auiLoc[eInvVPMatrix], 1, GL_FALSE, mInvVP.ptr());

	PVRTVec3 vEyePos = m_mView.inverse() * PVRTVec4(0, 0, 0, 1);

	glUniform3fv(m_SkyboxProgram.auiLoc[eEyePos], 1, vEyePos.ptr());

	glBindBuffer(GL_ARRAY_BUFFER, m_uiSquareVbo);

	glEnableVertexAttribArray(VERTEX_ARRAY);
	glVertexAttribPointer(VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 3, 0);

	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_CUBE_MAP, m_uiCubeTex);

	glDrawArrays(GL_TRIANGLES, 0, 6);

	glDisableVertexAttribArray(VERTEX_ARRAY);

	glBindBuffer(GL_ARRAY_BUFFER, 0);
}
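DrawSkybox() draws six vertices from m_uiSquareVbo as two screen-covering triangles; the vertex shader is then expected to unproject each corner with the inverse view-projection matrix passed above to obtain a cube-map lookup direction. The VBO itself is created elsewhere in the sample. A minimal sketch of how such a buffer could be filled, assuming the quad is stored directly in normalized device coordinates on the far plane (the helper name and vertex values are illustrative, not the sample's actual code):

// Hypothetical helper: fills a VBO with a full-screen quad (two triangles,
// six vertices of three floats each) at z = 1 so the skybox sits behind
// everything else in the depth buffer.
GLuint CreateFullScreenQuadVbo()
{
	const GLfloat afQuad[] = {
		-1.0f, -1.0f, 1.0f,    1.0f, -1.0f, 1.0f,    1.0f,  1.0f, 1.0f,
		-1.0f, -1.0f, 1.0f,    1.0f,  1.0f, 1.0f,   -1.0f,  1.0f, 1.0f };

	GLuint uiVbo;
	glGenBuffers(1, &uiVbo);
	glBindBuffer(GL_ARRAY_BUFFER, uiVbo);
	glBufferData(GL_ARRAY_BUFFER, sizeof(afQuad), afQuad, GL_STATIC_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, 0);
	return uiVbo;
}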
Example no. 2
/*!****************************************************************************
 @Function		RenderFloor
 @Description	Renders the floor as a quad.
******************************************************************************/
void OGLES2ParticleSystem::RenderFloor()
{
	glUseProgram(m_SimpleShaderProgram.uiId);

	PVRTMat3 mViewIT(m_mView.inverse().transpose());
	glUniformMatrix4fv(m_SimpleShaderProgram.iModelViewProjectionMatrixLoc, 1, GL_FALSE, m_mViewProjection.f);
	glUniformMatrix4fv(m_SimpleShaderProgram.iModelViewMatrixLoc, 1, GL_FALSE, m_mView.f);
	glUniformMatrix3fv(m_SimpleShaderProgram.iModelViewITMatrixLoc, 1, GL_FALSE, mViewIT.f);

	PVRTVec3 vLightPosition = m_mView * PVRTVec4(g_caLightPosition, 1.0f);
	glUniform3fv(m_SimpleShaderProgram.iLightPosition, 1, &vLightPosition.x);

	// Enable vertex attributes
	glEnableVertexAttribArray(VERTEX_ARRAY);
	glEnableVertexAttribArray(NORMAL_ARRAY);

	PVRTVec2 minCorner(-100.0f, -100.0f);
	PVRTVec2 maxCorner( 100.0f,  100.0f);

	const float afVertexData[] = { minCorner.x, 0.0f, minCorner.y,  maxCorner.x, 0.0f, minCorner.y,  
		                           minCorner.x, 0.0f, maxCorner.y,  maxCorner.x, 0.0f, maxCorner.y };
	const float afNormalData[] = { 0.0f, 1.0f, 0.0f,  0.0f, 1.0f, 0.0f,  0.0f, 1.0f, 0.0f,  0.0f, 1.0f, 0.0f };

	glVertexAttribPointer(VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, 0, afVertexData);
	glVertexAttribPointer(NORMAL_ARRAY, 3, GL_FLOAT, GL_FALSE, 0, afNormalData);

	// Draw the quad
	glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

	// Safely disable the vertex attribute arrays
	glDisableVertexAttribArray(VERTEX_ARRAY);
	glDisableVertexAttribArray(NORMAL_ARRAY);	
}
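The 3x3 matrix mViewIT above is the inverse-transpose of the view matrix, which is the correct transform for normals: for a rigid transform it reduces to the rotation part, and unlike the plain matrix it also stays correct under non-uniform scale. A quick illustration using only PVRTools calls already seen in these samples (the values are arbitrary, not taken from the sample):

// Illustrative only: contrasts a matrix with its inverse-transpose for
// normal transformation.
static void NormalMatrixExample()
{
	// For a pure rotation the inverse-transpose equals the rotation itself.
	PVRTMat4 mRot = PVRTMat4::RotationY(0.7f);
	PVRTMat3 mRotIT(mRot.inverse().transpose());      // same 3x3 block as mRot

	// Under a non-uniform scale the two differ: the inverse-transpose is
	// diag(1, 0.25, 1) rather than diag(1, 4, 1), which is what keeps a
	// transformed normal perpendicular to the scaled surface.
	PVRTMat4 mScale = PVRTMat4::Scale(1.0f, 4.0f, 1.0f);
	PVRTMat3 mScaleIT(mScale.inverse().transpose());

	(void)mRotIT; (void)mScaleIT;
}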
Example no. 3
/*!****************************************************************************
 @Function		DrawScene
 @Description	Draws the scene
******************************************************************************/
void OGLES3ShadowVolumes::DrawScene()
{
	SPODNode* pNode;
	PVRTMat4 mWorld;
	PVRTMat4 mModelView, mMVP;

	// Use the shader program for the scene
	glUseProgram(m_BaseShader.uiId);

	// Go through the meshes drawing each one
	for(unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
	{
		pNode = &m_Scene.pNode[i];

		// Get the world matrix for this particular node.
		switch(i)
		{
			case eBigCog:
				mWorld = PVRTMat4::RotationZ(m_fBigCogAngle);
			break;
			case eSmallCog:
				mWorld = PVRTMat4::RotationZ(m_fSmallCogAngle);
			break;
			default:
				mWorld = m_Scene.GetWorldMatrix(*pNode);
		}

		// Pass the model-view-projection matrix (MVP) to the shader to transform the vertices
		mMVP = m_mProjection * m_mView * mWorld;
		glUniformMatrix4fv(m_BaseShader.auiLoc[eMVPMatrix], 1, GL_FALSE, mMVP.ptr());

		// Pass the light position in model space to the shader
		PVRTVec4 vLightPosModel;
		vLightPosModel = mWorld.inverse() * m_vLightPosWorld;

		glUniform3fv(m_BaseShader.auiLoc[eLightPosModel], 1, &vLightPosModel.x);

		// Loads the correct texture using our texture lookup table
		glBindTexture(GL_TEXTURE_2D, m_puiTextures[pNode->nIdxMaterial]);

		// Draw the mesh node
		DrawMesh(i);
	}
}
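DrawMesh(i) is implemented elsewhere in this sample and is not shown here. Purely as a sketch of what such a helper typically does in these PowerVR examples (the parameter list and member names below are assumptions, not the sample's verified code), it binds the node's vertex and index buffers, points the enabled attributes at the interleaved data and submits an indexed triangle list:

// Rough, illustrative DrawMesh-style helper; the real implementation in the
// sample may differ (e.g. it also feeds normals and UVs to the shader).
void DrawMeshSketch(CPVRTModelPOD &Scene, GLuint *puiVbo, GLuint *puiIndexVbo, unsigned int uiNodeIdx)
{
	SPODNode& Node = Scene.pNode[uiNodeIdx];
	SPODMesh& Mesh = Scene.pMesh[Node.nIdx];

	glBindBuffer(GL_ARRAY_BUFFER, puiVbo[Node.nIdx]);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, puiIndexVbo[Node.nIdx]);

	// With the VBO bound, pData holds the byte offset of each attribute
	// within the interleaved vertex data.
	glVertexAttribPointer(VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sVertex.nStride, Mesh.sVertex.pData);
	glEnableVertexAttribArray(VERTEX_ARRAY);

	glDrawElements(GL_TRIANGLES, Mesh.nNumFaces * 3, GL_UNSIGNED_SHORT, 0);

	glDisableVertexAttribArray(VERTEX_ARRAY);
	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}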
Example no. 4
void ShaderEnvMap::UseProgram()
{
	Shader::UseProgram();
	
	glUniform1i(myCubeReflection, false);
	
	glUniform1i(my2DMap, 0);
	glUniform1i(myCubeMap, 1);

	CameraManager * pCameraManager = CameraManager::GetCameraManager();
	Camera * pCurrentCamera = pCameraManager->GetCurrentCamera();
	RenderLayerManager & renderManager = RenderLayerManager::GetRenderLayerManager();
	Mesh * pCurrentMesh = renderManager.GetCurrentMesh();

	PVRTMat4 viewMtx(pCurrentCamera->GetViewMtx().f);
	
	static float m_fAngleX = 0.0;
	static float m_fAngleY = 0.0;

	PVRTMat4 mModel, mRotX, mRotY;
	mRotX = PVRTMat4::RotationX(m_fAngleX);
	mRotY = PVRTMat4::RotationY(m_fAngleY);
	
	mModel = mRotY * mRotX;

	m_fAngleX += 0.01f;
	//m_fAngleY += 0.011f;

	//PVRTMat4 meshWorld( pCurrentMesh->GetWorldMtx().f );
	PVRTMat4 meshWorld = mModel;
	PVRTMat4 modelView = viewMtx * meshWorld;
	
	// Set model matrix
	PVRTMat3 model3x3 = PVRTMat3(meshWorld);
	glUniformMatrix3fv( myModelWorld, 1, GL_FALSE, model3x3.ptr());

	// Set eye position in model space
	PVRTVec4 eyePosModel;
	eyePosModel = modelView.inverse() * PVRTVec4(0, 0, 0, 1);

	glUniform3fv(myEyePosModel, 1, &eyePosModel.x);
}
Example no. 5
/*!****************************************************************************
 @Function		BuildVolume
 @Return		bool		true if no error occurred
 @Description	This function will create the volume that will be drawn
				in the stencil test.
******************************************************************************/
bool OGLES3ShadowVolumes::BuildVolume(unsigned int ui32ShadowVol, PVRTVec4 *pLightPos)
{
	SPODNode* pNode;
	PVRTMat4 mWorld;
	PVRTVec4 vModelLightPos;

	int i32MeshIndex = m_pui32MeshIndex[ui32ShadowVol];

	pNode = &m_Scene.pNode[i32MeshIndex];

	// Get the world matrix for this particular node.
	switch(i32MeshIndex)
	{
		case eBigCog:
			mWorld = PVRTMat4::RotationZ(m_fBigCogAngle);
		break;
		case eSmallCog:
			mWorld = PVRTMat4::RotationZ(m_fSmallCogAngle);
		break;
		default:
			mWorld = m_Scene.GetWorldMatrix(*pNode);
	}

	/*
		Convert the light position into model space for the current Node.
	*/
	vModelLightPos = mWorld.inverse() * (*pLightPos);

	/*
		Using the light position set up the shadow volume so it can be extruded in the shader.
	*/

	unsigned int ui32Flags = PVRTSHADOWVOLUME_VISIBLE | PVRTSHADOWVOLUME_NEED_CAP_FRONT | PVRTSHADOWVOLUME_NEED_CAP_BACK;
	PVRTShadowVolSilhouetteProjectedBuild(&m_pShadowVol[ui32ShadowVol], ui32Flags , &m_pShadowMesh[ui32ShadowVol], (PVRTVec3*) &vModelLightPos, true);

	return true;
}
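BuildVolume() prepares the silhouette data for a single node; since the cogs rotate and the light can move, the volumes need rebuilding whenever the model-space light position changes. A hypothetical per-frame call site (the volume count is illustrative; m_vLightPosWorld is the light position member used elsewhere in this class):

// Illustrative fragment: rebuild every shadow volume before drawing them.
const unsigned int ui32NumShadowVolumes = 2; // e.g. one per cog; the real count lives in the sample
for(unsigned int ui32Vol = 0; ui32Vol < ui32NumShadowVolumes; ++ui32Vol)
{
	if(!BuildVolume(ui32Vol, &m_vLightPosWorld))
		return false;
}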
Example no. 6
/*!****************************************************************************
 @Function		RenderSphere
 @Description	Renders a sphere at the specified position.
******************************************************************************/
void OGLES2ParticleSystem::RenderSphere(PVRTVec3 position, float radius)
{
	glUseProgram(m_SimpleShaderProgram.uiId);

	PVRTMat4 mModel = PVRTMat4::Translation(position) * PVRTMat4::Scale(radius, radius, radius);
	PVRTMat4 mModelView = m_mView * mModel;
	PVRTMat4 mModelViewProj = m_mProjection * mModelView;
	PVRTMat3 mModelViewIT(mModelView.inverse().transpose());
	glUniformMatrix4fv(m_SimpleShaderProgram.iModelViewProjectionMatrixLoc, 1, GL_FALSE, mModelViewProj.f);
	glUniformMatrix4fv(m_SimpleShaderProgram.iModelViewMatrixLoc, 1, GL_FALSE, mModelView.f);
	glUniformMatrix3fv(m_SimpleShaderProgram.iModelViewITMatrixLoc, 1, GL_FALSE, mModelViewIT.f);

	PVRTVec3 vLightPosition = m_mView * PVRTVec4(g_caLightPosition, 1.0f);
	glUniform3fv(m_SimpleShaderProgram.iLightPosition, 1, &vLightPosition.x);

	// Enable vertex attributes
	glEnableVertexAttribArray(VERTEX_ARRAY);
	glEnableVertexAttribArray(NORMAL_ARRAY);

	glBindBuffer(GL_ARRAY_BUFFER, m_uiVbo);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_uiIbo);

	SPODMesh* pMesh = &m_Scene.pMesh[0];
	glVertexAttribPointer(VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
	glVertexAttribPointer(NORMAL_ARRAY, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);

	// Indexed Triangle list
	glDrawElements(GL_TRIANGLES, pMesh->nNumFaces*3, GL_UNSIGNED_SHORT, 0);

	// Safely disable the vertex attribute arrays
	glDisableVertexAttribArray(VERTEX_ARRAY);
	glDisableVertexAttribArray(NORMAL_ARRAY);

	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
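A hypothetical call site for RenderSphere(), for example to visualise the light source used by this sample (the radius is arbitrary; g_caLightPosition is the constant already referenced above):

// Illustrative only: draw a small sphere at the light position.
RenderSphere(PVRTVec3(g_caLightPosition), 5.0f);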
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLESIntroducingPFX::RenderScene()
{
	// Clears the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use the loaded effect
	m_pEffect->Activate();

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	int iTime = PVRShellGetTime();
	int iDeltaTime = iTime - m_iTimePrev;
	m_iTimePrev	= iTime;
	m_fFrame	+= (float)iDeltaTime * DEMO_FRAME_RATE;
	if (m_fFrame > m_Scene.nNumFrame-1)
		m_fFrame = 0;

	// Sets the scene animation to this frame
	m_Scene.SetFrame(m_fFrame);

	{
		PVRTVec3	vFrom, vTo, vUp;
		VERTTYPE	fFOV;
		vUp.x = 0.0f;
		vUp.y = 1.0f;
		vUp.z = 0.0f;

		// We can get the camera position, target and field of view (fov) with GetCameraPos()
		fFOV = m_Scene.GetCameraPos(vFrom, vTo, 0) * 0.4f;

		/*
			We can build the world view matrix from the camera position, target and an up vector.
			For this we use PVRTMat4LookAtRH().
		*/
		m_mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

		// Calculates the projection matrix
		bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
		m_mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), CAM_NEAR, CAM_FAR, PVRTMat4::OGL, bRotate);
	}

	/*
		A scene is composed of nodes. There are 3 types of nodes:
		- MeshNodes :
			references a mesh in the pMesh[].
			These nodes are at the beginning of the pNode[] array.
			And there are nNumMeshNode number of them.
			This way the .pod format can instantiate several times the same mesh
			with different attributes.
		- lights
		- cameras
		To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
	*/
	for (int i=0; i<(int)m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];

		// Gets pMesh referenced by the pNode
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];

		glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);

		// Gets the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(*pNode);

		PVRTMat4 mWorldView;
		mWorldView = m_mView * mWorld;

		for(unsigned int j = 0; j < m_nUniformCnt; ++j)
		{
			switch(m_psUniforms[j].nSemantic)
			{
			case eUsPOSITION:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsNORMAL:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsUV:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWVP;

					/* Passes the world-view-projection matrix (WVP) to the shader to transform the vertices */
					mWVP = m_mProjection * mWorldView;
					glUniformMatrix4fv(m_psUniforms[j].nLocation, 1, GL_FALSE, mWVP.f);
				}
				break;
			case eUsWORLDVIEWIT:
				{
					PVRTMat4 mWorldViewI, mWorldViewIT;

					/* Passes the inverse transpose of the world-view matrix to the shader to transform the normals */
					mWorldViewI  = mWorldView.inverse();
					mWorldViewIT = mWorldViewI.transpose();

					PVRTMat3 WorldViewIT = PVRTMat3(mWorldViewIT);

					glUniformMatrix3fv(m_psUniforms[j].nLocation, 1, GL_FALSE, WorldViewIT.f);
				}
				break;
			case eUsLIGHTDIREYE:
				{
					// Reads the light direction from the scene.
					PVRTVec4 vLightDirection;
					PVRTVec3 vPos;
					vLightDirection = m_Scene.GetLightDirection(0);

					vLightDirection.x = -vLightDirection.x;
					vLightDirection.y = -vLightDirection.y;
					vLightDirection.z = -vLightDirection.z;

					/*
						Sets the w component to 0, so when passing it to glLight(), it is
						considered as a directional light (as opposed to a spot light).
					*/
					vLightDirection.w = 0;

					// Passes the light direction in eye space to the shader
					PVRTVec4 vLightDirectionEyeSpace;
					vLightDirectionEyeSpace = m_mView * vLightDirection;

					glUniform3f(m_psUniforms[j].nLocation, vLightDirectionEyeSpace.x, vLightDirectionEyeSpace.y, vLightDirectionEyeSpace.z);
				}
				break;
			case eUsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(m_psUniforms[j].nLocation, m_psUniforms[j].nIdx);
				}
				break;
			}
		}

		/*
			Now that the model-view matrix is set and the materials ready,
			call another function to actually draw the mesh.
		*/
		DrawMesh(pMesh);
		glBindBuffer(GL_ARRAY_BUFFER, 0);

		for(unsigned int j = 0; j < m_nUniformCnt; ++j)
		{
			switch(m_psUniforms[j].nSemantic)
			{
			case eUsPOSITION:
				{
					glDisableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsNORMAL:
				{
					glDisableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsUV:
				{
					glDisableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			}
		}
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("IntroducingPFX", "", ePVRTPrint3DLogoIMG);
	m_Print3D.Flush();

	return true;
}
Example no. 8
void Mesh3D::draw(SceneGraph *scene,  Sprite3D *sprite, int min, int max)
{
	
	scene->m_ppEffect[m_shader]->Activate();
	
	if(m_blendEnable)
	{
		glEnable (GL_BLEND);
		glBlendFunc (m_blend1, m_blend2);
	} else 
	{
		glDisable (GL_BLEND);
	}
	
	//glEnable(GL_SAMPLE_COVERAGE);
	//glSampleCoverage(1.0, GL_FALSE);
	
	for(unsigned int j = 0; j < scene->m_pnUniformCnt[m_shader]; ++j)
	{
		//unsigned int location = scene->m_ppsUniforms[m_shader][j].nLocation;
		EUniformSemantic semantic = (EUniformSemantic)scene->m_ppsUniforms[m_shader][j].nSemantic; 
		switch(semantic)
		{
			case eUsMVPMATRIX:
			{
				PVRTMat4 mMVP;
				
				/* Passes the model-view-projection matrix (MVP) to the shader to transform the vertices */
				
				if(useSceneModel)
				{
					mMVP = scene->m_mProjection * scene->m_mModelView * sprite->modelView;
				}
				else 
				{
					mMVP = scene->m_mProjection * sprite->modelView;
				}
				glUniformMatrix4fv(scene->m_ppsUniforms[m_shader][j].nLocation, 1, GL_FALSE, mMVP.f);
			}
				break;
			case eUsMODELVIEW:
			{
				PVRTMat4 MV = useSceneModel ? scene->m_mModelView * sprite->modelView : sprite->modelView  ;
				glUniformMatrix4fv(scene->m_ppsUniforms[m_shader][j].nLocation, 1, GL_FALSE, MV.f);
			}
				break;
			case eUsMODELVIEWIT:
			{
				PVRTMat4 mModelViewI, mModelViewIT;
				PVRTMat4 MV = useSceneModel ?  scene->m_mModelView * sprite->modelView : sprite->modelView ;
				/* Passes the inverse transpose of the model-view matrix to the shader to transform the normals */
				mModelViewI = MV.inverse();
				mModelViewIT= mModelViewI.transpose();
				PVRTMat3 ModelViewIT = PVRTMat3(mModelViewIT);
				
				glUniformMatrix3fv(scene->m_ppsUniforms[m_shader][j].nLocation, 1, GL_FALSE, ModelViewIT.f);
			}
				break;
			case eUsVIEWIT:
			{
				PVRTMat4 mViewI, mViewIT;
				
				/* Passes the inverse transpose of the model-view matrix to the shader to transform the normals */
				mViewI = scene->m_mView.inverse();
				mViewIT= mViewI.transpose();
				
				PVRTMat3 ViewIT = PVRTMat3(mViewIT);
				
				glUniformMatrix3fv(scene->m_ppsUniforms[m_shader][j].nLocation, 1, GL_FALSE, ViewIT.f);
			}
				break;	
                
            default:
                break;

		}
	}
	
	
	for(int i32MeshIndex = min; i32MeshIndex < max; i32MeshIndex++)
	{
		//int i32MeshIndex = i;
		//int i32MeshIndex = m_ModelPOD.pNode[i].nIdx;
		//SPODMesh* submesh = &m_ModelPOD.pMesh[i32MeshIndex];
		//int materialIndex = m_ModelPOD.pNode[i].nIdxMaterial;
		//SPODMaterial* pMaterial = &m_ModelPOD.pMaterial[materialIndex];
		
		//glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[i32MeshIndex]);
		int materialIndex = meshInfo[i32MeshIndex].materialIndex;
		
		for(unsigned int j = 0; j < scene->m_pnUniformCnt[m_shader]; ++j)
		{
			unsigned int location = scene->m_ppsUniforms[m_shader][j].nLocation;
			EUniformSemantic semantic = (EUniformSemantic)scene->m_ppsUniforms[m_shader][j].nSemantic; 
			switch(semantic)
			{
				case eUsMATERIALCOLORAMBIENT:
				{
					PVRTVec3 vColour = materialInfo[materialIndex].ambientColor;
					glUniform3f(location, vColour.x, vColour.y, vColour.z);
				}
					break;
				case eUsMATERIALCOLORDIFFUSE:
				{
					PVRTVec3 vColour =  materialInfo[materialIndex].diffuseColor;
					glUniform3f(location, vColour.x, vColour.y, vColour.z);
				}
					break;
					
				case eUsPOSITION:
				{
					glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO[ i32MeshIndex] );
					//glVertexAttribPointer(m_ppsUniforms[m_nCurrentShader][j].nLocation, 3, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
					glVertexAttribPointer(scene->m_ppsUniforms[m_shader][j].nLocation, 3, GL_FLOAT, GL_FALSE, meshInfo[i32MeshIndex].vertexStride, (const void*) NULL);
					
					glEnableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
				}
					break;
				case eUsNORMAL:
				{
					glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO[ i32MeshIndex]);
					glVertexAttribPointer(scene->m_ppsUniforms[m_shader][j].nLocation, 3, GL_FLOAT, GL_FALSE, 
										  meshInfo[i32MeshIndex].normalStride, (const void*)  meshInfo[i32MeshIndex].normalOffset);
					
					glEnableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
				}
					break;
				case eUsTANGENT:
				{
					glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO[ i32MeshIndex]);
					glVertexAttribPointer(scene->m_ppsUniforms[m_shader][j].nLocation, 3, GL_FLOAT, GL_FALSE, 
										  meshInfo[i32MeshIndex].tangentStride, (const void*)  meshInfo[i32MeshIndex].tangentOffset);
					
					glEnableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
					
				}
					break;
				case eUsUV:
				{
					
					//glVertexAttribPointer(m_ppsUniforms[m_nCurrentShader][j].nLocation, 2, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
					if( meshInfo[i32MeshIndex].uvOffset != 0)
					{
						glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO[i32MeshIndex]);
						glVertexAttribPointer(scene->m_ppsUniforms[m_shader][j].nLocation, 2, GL_FLOAT, GL_FALSE, 
											  meshInfo[i32MeshIndex].uvStride, (const void*)  meshInfo[i32MeshIndex].uvOffset);
						glEnableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
					}
				}
					break;

				case eUsTEXTURE:
				{
					// Set the sampler variable to the texture unit					
					int index = scene->m_ppsUniforms[m_shader][j].nIdx;
					
					switch(index)
					{
						case 0:
						{
							GLuint tex = m_uiTexture[materialIndex];
							glActiveTexture(GL_TEXTURE0);
							glBindTexture(GL_TEXTURE_2D, tex);
							
							//NSLog(@"Normal map %d %d", index, tex);
							glUniform1i(scene->m_ppsUniforms[m_shader][j].nLocation, index);
						}
							break;
						case 1:
						{
							//NSLog(@"Normal map %d %d", index, m_normalMap);
							glActiveTexture(GL_TEXTURE1);
							glBindTexture(GL_TEXTURE_2D, m_normalMap);
						
							glUniform1i(scene->m_ppsUniforms[m_shader][j].nLocation, index);
						}
							break;
						case 2:
						{
							glActiveTexture(GL_TEXTURE2);
							glBindTexture(GL_TEXTURE_CUBE_MAP, m_cubeMap);

							glUniform1i(scene->m_ppsUniforms[m_shader][j].nLocation, index);
						}
							break;
					}
					
				}				
					break;
					
				case eUsTEXTURE_ENABLED:
				{
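					// Pack texture availability into a bitmask for the shader:
					// bit 0 = diffuse map, bit 1 = normal map, bit 2 = cube map
					// (inferred from the flags OR-ed together below).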
					int texture_enabled = 0;
					int texture = (signed int)m_uiTexture[materialIndex];
					if(texture != INT_MAX)
						texture_enabled = 1;
						
					//printf("Texture %d %d\n", m_uiTexture[materialIndex], texture_enabled);
					if(m_normalMap > 0)
					{
						texture_enabled |= 2;
					}
					if(m_cubeMap > 0)
					{
						texture_enabled |= 4;
					}
					
					glUniform1i(scene->m_ppsUniforms[m_shader][j].nLocation, texture_enabled);
				}
					break;
				case eUsANIMATION:
				{
					// Float in the range 0..1: contains this objects distance through its animation.
					float fAnimation = 0.5f * scene->m_fViewAngle / PVRT_PI;
					glUniform1f(scene->m_ppsUniforms[m_shader][j].nLocation, fAnimation);
				}
					break;
				case eUsMATERIALSHININESS:
				{
					float shiness =  materialInfo[materialIndex].shiness;
					glUniform1f(location, shiness);
				}
					break;
				case eUsMATERIALCOLORSPECULAR:
				{
					PVRTVec3 vColour =  materialInfo[materialIndex].specularColor;
					glUniform3f(location, vColour.x, vColour.y, vColour.z);
				}
					break;
				case eUsLIGHTPOSWORLD:
				{
					PVRTVec3 position(45, 72, 52);
					glUniform3f(location, position.x , position.y, position.z);
				}
					break;
                default:
                    break;
			}
		}
		// Load the correct texture using our texture lookup table
		
		
		//glBindBuffer(GL_ARRAY_BUFFER, 0);	// Unbind the last buffer used.
		drawMesh(i32MeshIndex, materialIndex);
	}
	
	/*
	 Disable attributes
	 */
	for(unsigned int j = 0; j < scene->m_pnUniformCnt[m_shader]; ++j)
	{
		switch(scene->m_ppsUniforms[m_shader][j].nSemantic)
		{
			case eUsPOSITION:
			{
				glDisableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
			}
				break;
			case eUsNORMAL:
			{
				glDisableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
			}
				break;
			case eUsUV:
			{
				glDisableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
			}
				break;
		
		}
	}

	glDisable(GL_BLEND);

}
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2PVRScopeRemote::RenderScene()
{
	CPPLProcessingScoped PPLProcessingScoped(m_psSPSCommsData,
		__FUNCTION__, static_cast<unsigned int>(strlen(__FUNCTION__)), m_i32FrameCounter);

	if(m_psSPSCommsData)
	{
		// mark every N frames
		if(!(m_i32FrameCounter % 100))
		{
			char buf[128];
			const int nLen = sprintf(buf, "frame %d", m_i32FrameCounter);
			m_bCommsError |= !pplSendMark(m_psSPSCommsData, buf, nLen);
		}

		// Check for dirty items
		m_bCommsError |= !pplSendProcessingBegin(m_psSPSCommsData, "dirty", static_cast<unsigned int>(strlen("dirty")), m_i32FrameCounter);
		{
			unsigned int nItem, nNewDataLen;
			const char *pData;
			bool bRecompile = false;
			while(pplLibraryDirtyGetFirst(m_psSPSCommsData, &nItem, &nNewDataLen, &pData))
			{
				PVRShellOutputDebug("dirty item %u %u 0x%08x\n", nItem, nNewDataLen, pData);
				switch(nItem)
				{
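				// The dirty-item indices correspond to the editable items this
				// sample exposes: 0 = fragment shader source, 1 = vertex shader
				// source, 2 = minimum thickness, 3 = maximum variation
				// (inferred from the cases below).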
				case 0:
					delete [] m_pszFragShader;
					m_pszFragShader = new char [nNewDataLen+1];
					strncpy(m_pszFragShader, (char*)pData, nNewDataLen);
					m_pszFragShader[nNewDataLen] = 0;
					bRecompile = true;
					break;

				case 1:
					delete [] m_pszVertShader;
					m_pszVertShader = new char [nNewDataLen+1];
					strncpy(m_pszVertShader, (char*)pData, nNewDataLen);
					m_pszVertShader[nNewDataLen] = 0;
					bRecompile = true;
					break;

				case 2:
					if(nNewDataLen == sizeof(SSPSCommsLibraryTypeFloat))
					{
						const SSPSCommsLibraryTypeFloat * const psData = (SSPSCommsLibraryTypeFloat*)pData;
						m_fMinThickness = psData->fCurrent;
					}
					break;
				case 3:
					if(nNewDataLen == sizeof(SSPSCommsLibraryTypeFloat))
					{
						const SSPSCommsLibraryTypeFloat * const psData = (SSPSCommsLibraryTypeFloat*)pData;
						m_fMaxVariation = psData->fCurrent;
					}
					break;
				}
			}

			if(bRecompile)
			{
				CPVRTString ErrorStr;
				glDeleteProgram(m_ShaderProgram.uiId);
				glDeleteShader(m_uiVertShader);
				glDeleteShader(m_uiFragShader);
				if (!LoadShaders(&ErrorStr, m_pszFragShader, m_pszVertShader))
				{
					PVRShellOutputDebug("%s", ErrorStr.c_str());
				}
			}
		}
		m_bCommsError |= !pplSendProcessingEnd(m_psSPSCommsData);
	}

	if (m_psSPSCommsData)
	{
		m_bCommsError |= !pplSendProcessingBegin(m_psSPSCommsData, "draw", static_cast<unsigned int>(strlen("draw")), m_i32FrameCounter);
	}

	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind texture
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiTexture);

	// Rotate and translate the model matrix
	PVRTMat4 mModel;
	mModel = PVRTMat4::RotationY(m_fAngleY);
	m_fAngleY += (2*PVRT_PI/60)/7;

	// Set model view projection matrix
	PVRTMat4 mModelView, mMVP;
	mModelView = m_mView * mModel;
	mMVP =  m_mProjection * mModelView;
	glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr());

	// Set light direction in model space
	PVRTVec4 vLightDirModel;
	vLightDirModel = mModel.inverse() * PVRTVec4(1, 1, 1, 0);

	glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);

	// Set eye position in model space
	PVRTVec4 vEyePosModel;
	vEyePosModel = mModelView.inverse() * PVRTVec4(0, 0, 0, 1);
	glUniform3fv(m_ShaderProgram.uiEyePosLoc, 1, &vEyePosModel.x);

	/*
		Set the iridescent shading parameters
	*/
	// Set the minimum thickness of the coating in nm
	glUniform1f(m_ShaderProgram.uiMinThicknessLoc, m_fMinThickness);

	// Set the maximum variation in thickness of the coating in nm
	glUniform1f(m_ShaderProgram.uiMaxVariationLoc, m_fMaxVariation);

	/*
		Now that the uniforms are set, call another function to actually draw the mesh.
	*/
	DrawMesh(0);

	if (m_psSPSCommsData)
	{
		m_bCommsError |= !pplSendProcessingEnd(m_psSPSCommsData);
		m_bCommsError |= !pplSendProcessingBegin(m_psSPSCommsData, "Print3D", static_cast<unsigned int>(strlen("Print3D")), m_i32FrameCounter);
	}

	// Displays the demo name using the tools. For a detailed explanation, see the example IntroducingPVRTools
	if(m_bCommsError)
	{
		m_Print3D.DisplayDefaultTitle("PVRScopeRemote", "Remote APIs\n\nError:\n  PVRScopeComms failed\n  Is PVRPerfServer connected?", ePVRTPrint3DSDKLogo);
		m_bCommsError = false;
	}
	else
		m_Print3D.DisplayDefaultTitle("PVRScopeRemote", "Remote APIs", ePVRTPrint3DSDKLogo);

	m_Print3D.Flush();

	if (m_psSPSCommsData)
	{
		m_bCommsError |= !pplSendProcessingEnd(m_psSPSCommsData);
	}

	// send counters
	m_anCounterReadings[eCounter]	= m_i32FrameCounter;
	m_anCounterReadings[eCounter10]	= m_i32Frame10Counter;
	if(m_psSPSCommsData)
	{
		m_bCommsError |= !pplCountersUpdate(m_psSPSCommsData, m_anCounterReadings);
	}

	// update some counters
	++m_i32FrameCounter;
	if(0 == (m_i32FrameCounter / 10) % 10)
	{
		m_i32Frame10Counter += 10;
	}

	return true;
}
Example no. 10
/*******************************************************************************
 * Function Name : DrawModel
 * Description   : Draws the model
 *******************************************************************************/
void OGLES2Shaders::DrawModel()
{
	// Use the loaded effect
	m_ppEffect[m_nCurrentShader]->Activate();

	/*
		Set attributes and uniforms
	*/
	const CPVRTArray<SPVRTPFXUniform>& Uniforms = m_ppEffect[m_nCurrentShader]->GetUniformArray();

	for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
	{

		switch(Uniforms[j].nSemantic)
		{
		case ePVRTPFX_UsPOSITION:
			{
				glBindBuffer(GL_ARRAY_BUFFER, m_Surface->iVertexVBO);
				glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
				glEnableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsNORMAL:
			{
				glBindBuffer(GL_ARRAY_BUFFER, m_Surface->iNormalVBO);
				glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
				glEnableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsUV:
			{
				glBindBuffer(GL_ARRAY_BUFFER, m_Surface->iUvVBO);
				glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
				glEnableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsWORLDVIEWPROJECTION:
			{
				PVRTMat4 mMVP;

				/* Passes the model-view-projection matrix (MVP) to the shader to transform the vertices */
				mMVP = m_mProjection * m_mModelView;
				glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mMVP.f);
			}
			break;
		case ePVRTPFX_UsWORLDVIEW:
			{
				glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, m_mModelView.f);
			}
			break;
		case ePVRTPFX_UsWORLDVIEWIT:
			{
				PVRTMat4 mModelViewI, mModelViewIT;

				/* Passes the inverse transpose of the model-view matrix to the shader to transform the normals */
				mModelViewI = m_mModelView.inverse();
				mModelViewIT= mModelViewI.transpose();
				PVRTMat3 ModelViewIT = PVRTMat3(mModelViewIT);

				glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, ModelViewIT.f);
			}
			break;
		case ePVRTPFX_UsVIEWIT:
			{
				PVRTMat4 mViewI, mViewIT;

				/* Passes the inverse transpose of the model-view matrix to the shader to transform the normals */
				mViewI = m_mView.inverse();
				mViewIT= mViewI.transpose();

				PVRTMat3 ViewIT = PVRTMat3(mViewIT);

				glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, ViewIT.f);
			}
			break;
		case ePVRTPFX_UsTEXTURE:
			{
				// Set the sampler variable to the texture unit
				glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
			}
			break;
		case ePVRTPFX_UsANIMATION:
			{
				// Float in the range 0..1: contains this objects distance through its animation.
				float fAnimation = 0.5f * m_fViewAngle / PVRT_PI;
				glUniform1f(Uniforms[j].nLocation, fAnimation);
			}
			break;
		}
	}

	glBindBuffer(GL_ARRAY_BUFFER, 0);	// Unbind the last buffer used.

	glDrawElements(GL_TRIANGLES, m_Surface->GetNumFaces()*3, GL_UNSIGNED_SHORT, m_Surface->pIndex);

	/*
		Disable attributes
	*/
	for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
	{
		switch(Uniforms[j].nSemantic)
		{
		case ePVRTPFX_UsPOSITION:
			{
				glDisableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsNORMAL:
			{
				glDisableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsUV:
			{
				glDisableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		}
	}

	return;
}
/*!***************************************************************************
 @Function		DrawPODScene
 @Input         mViewProjection
 @Input         bDrawCamera
 @Description   Draws the scene described by the loaded POD file.
 *****************************************************************************/
void OGLES3TextureStreaming::DrawPODScene(const PVRTMat4 &mViewProjection)
{
	// Clear the colour and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Get the position of the first light from the scene.
	PVRTVec4 vLightPosition = m_Scene.GetLightPosition(0);
	int iTVCount            = 0;

#if defined(__ANDROID__)
	// Check if the MVP has changed
	if (m_Camera.HasImageChanged() && m_Camera.HasProjectionMatrixChanged())
	{
		m_TexCoordsProjection = PVRTMat4(m_Camera.GetProjectionMatrix());
	}
#endif

	for(unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
	{
		SPODNode& Node = m_Scene.pNode[i];

		bool bIsTVScreen   = Node.nIdxMaterial == m_uiTVScreen;
		bool bIsRecordGlow = Node.nIdxMaterial == m_uiRecordGlow;

		// Get the node model matrix
		PVRTMat4 mWorld = m_Scene.GetWorldMatrix(Node);
		PVRTMat4 mModelView, mMVP;
		mMVP = mViewProjection * mWorld;

		GLint iMVPLoc = -1;
#if defined(__ANDROID__)
		GLint iTexProjLoc = -1;
#endif
		if(bIsTVScreen) // If we're drawing the TV screen change to the correct TV shader
		{
			_ASSERT(iTVCount < c_numTVScreens);
			if(c_screenEffects[iTVCount] == eTVNoise)
			{
				glUseProgram(m_TVNoiseShaderProgram.uiId);
				iMVPLoc = m_TVNoiseShaderProgram.uiMVP;
#if defined(__ANDROID__)
				iTexProjLoc = m_TVNoiseShaderProgram.uiVideoTexProjM;
#endif

				// Do the screen scrolling
				float fBandY1 = m_fBandScroll;
				float fBandY2 = fBandY1 + c_fBandWidth;
				glUniform2f(m_TVNoiseShaderProgram.uiScreenBand, fBandY1, fBandY2);

				// Do the noise
				PVRTVec2 vNoiseCoords;
				vNoiseCoords.x = (m_iNoiseCoordIdx % 4) * 0.25f;
				vNoiseCoords.y = (m_iNoiseCoordIdx / 4) * 0.25f;

				// Set the texmod value
				glUniform2f(m_TVNoiseShaderProgram.uiNoiseLoc, vNoiseCoords.x, vNoiseCoords.y);

				// Increment and reset
				m_iNoiseCoordIdx++;
				if(m_iNoiseCoordIdx >= 16)
					m_iNoiseCoordIdx = 0;
			}
			else if(c_screenEffects[iTVCount] == eTVGreyscale)
			{
				glUseProgram(m_TVGreyscaleShaderProgram.uiId);
				iMVPLoc = m_TVGreyscaleShaderProgram.uiMVP;
#if defined(__ANDROID__)
				iTexProjLoc = m_TVGreyscaleShaderProgram.uiVideoTexProjM;
#endif
			}
			else if(c_screenEffects[iTVCount] == eTVColour)
			{
				glUseProgram(m_TVShaderProgram.uiId);
				iMVPLoc = m_TVShaderProgram.uiMVP;
#if defined(__ANDROID__)
				iTexProjLoc = m_TVShaderProgram.uiVideoTexProjM;
#endif
			}
			else
			{
				_ASSERT(false); // Invalid enum
			}
			iTVCount++;
		}
		else if(bIsRecordGlow)
		{
			// Should the glow be active?
			unsigned long ulNow = PVRShellGetTime();
			if(ulNow - m_ulGlowTime > 1000)
			{
				m_bGlowState = !m_bGlowState;
				m_ulGlowTime = ulNow;
			}

			if(!m_bGlowState)
				continue;

			glEnable(GL_BLEND);
			glUseProgram(m_AmbientShaderProgram.uiId);
			iMVPLoc = m_AmbientShaderProgram.uiMVP;
		}
		else
		{
			glUseProgram(m_LitProgram.uiId);
			iMVPLoc = m_LitProgram.uiMVP;
		}

		glUniformMatrix4fv(iMVPLoc, 1, GL_FALSE, mMVP.f);

		// Pass the light position in model space to the shader. Don't do this for the TV screen.
		if(!bIsTVScreen && !bIsRecordGlow)
		{
			PVRTVec4 vLightPos;
			vLightPos = mWorld.inverse() * vLightPosition;

			glUniform3fv(m_LitProgram.uiLightPosition, 1, &vLightPos.x);
		}

		// Bind the correct texture
		if(Node.nIdxMaterial != -1)
		{
            if(Node.nIdxMaterial == m_uiTVScreen && m_i32Frame != 0)
			{
#if defined(__ANDROID__)
				GLuint yuvTexture = m_Camera.GetYUVTexture();

				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_EXTERNAL_OES, yuvTexture);

				// Set the sampler projection
				glUniformMatrix4fv(iTexProjLoc, 1, GL_FALSE, m_TexCoordsProjection.f);
#elif defined(__APPLE__)
				GLuint lumaTexture   = m_Camera.GetLuminanceTexture();
				GLuint chromaTexture = m_Camera.GetChrominanceTexture();
				GLenum lumaTarget    = m_Camera.GetLuminanceTextureTarget();
				GLenum chromaTarget  = m_Camera.GetChrominanceTextureTarget();

				glActiveTexture(GL_TEXTURE0);
				glBindTexture(lumaTarget, lumaTexture);

				glActiveTexture(GL_TEXTURE1);
				glBindTexture(chromaTarget, chromaTexture);
#endif

				if(c_screenEffects[iTVCount] == eTVNoise)
				{
					// Bind the noise texture
					glActiveTexture(GL_TEXTURE2);
					glBindTexture(GL_TEXTURE_2D, m_uiNoiseTex);
				}
			}
			else
			{
				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_2D, m_puiTextureIDs[Node.nIdxMaterial]);
			}
		}
		else
		{
			glActiveTexture(GL_TEXTURE0);
			glBindTexture(GL_TEXTURE_2D, 0);
		}

		/*
		 Now that the model-view matrix is set and the materials ready,
		 call another function to actually draw the mesh.
		 */
		DrawMesh(Node.nIdx, !(bIsTVScreen || bIsRecordGlow));

		if(bIsRecordGlow)
		{
			glDisable(GL_BLEND);
		}
	}
}
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLESPVRScopeExample::RenderScene()
{
	// Keyboard input (cursor up/down to cycle through counters)
	if(PVRShellIsKeyPressed(PVRShellKeyNameUP))
	{
		m_i32Counter++;

		if(m_i32Counter > (int) m_pScopeGraph->GetCounterNum())
			m_i32Counter = m_pScopeGraph->GetCounterNum();
	}

	if(PVRShellIsKeyPressed(PVRShellKeyNameDOWN))
	{
		m_i32Counter--;

		if(m_i32Counter < 0)
			m_i32Counter = 0;
	}

	if(PVRShellIsKeyPressed(PVRShellKeyNameACTION2))
		m_pScopeGraph->ShowCounter(m_i32Counter, !m_pScopeGraph->IsCounterShown(m_i32Counter));

	// Keyboard input (cursor left/right to change active group)
	if(PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
	{
		m_pScopeGraph->SetActiveGroup(m_pScopeGraph->GetActiveGroup()+1);
	}

	if(PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
	{
		m_pScopeGraph->SetActiveGroup(m_pScopeGraph->GetActiveGroup()-1);
	}

	// Clears the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Loads the projection matrix
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixf(m_mProjection.f);

	// Specify the modelview matrix
	PVRTMat4 mModel;
	SPODNode& Node = m_Scene.pNode[0];

	m_Scene.GetWorldMatrix(mModel, Node);

	// Rotate and Translate the model matrix
	m_fAngleY += (2*PVRT_PIf/60)/7;

	// Set model view projection matrix
	PVRTMat4 mModelView;
	mModelView = m_mView * PVRTMat4::RotationY(m_fAngleY) * mModel;

	glMatrixMode(GL_MODELVIEW);
	glLoadMatrixf(mModelView.f);

	/*
		Load the light direction from the scene if we have one
	*/

	// Enables lighting. See BasicTnL for a detailed explanation
	glEnable(GL_LIGHTING);
	glEnable(GL_LIGHT0);

	// Set light direction
	PVRTVec4 vLightDirModel;
	vLightDirModel = mModel.inverse() * PVRTVec4(1, 1, 1, 0);
	glLightfv(GL_LIGHT0, GL_POSITION, (float*)&vLightDirModel.x);

	// Enable the vertex position attribute array
	glEnableClientState(GL_VERTEX_ARRAY);

	// bind the texture
	glBindTexture(GL_TEXTURE_2D, m_uiTexture);

	/*
		Now that the model-view matrix is set and the materials are ready,
		call another function to actually draw the mesh.
	*/
	DrawMesh(Node.nIdx);

	// Disable the vertex positions
	glDisableClientState(GL_VERTEX_ARRAY);

	char Description[256];

	if(m_pScopeGraph->GetCounterNum())
	{
		sprintf(Description, "Active Grp %i\n\nCounter %i (Grp %i) \nName: %s\nShown: %s\nuser y-axis: %.2f  max: %.2f%s",
			m_pScopeGraph->GetActiveGroup(), m_i32Counter,
			m_pScopeGraph->GetCounterGroup(m_i32Counter),
			m_pScopeGraph->GetCounterName(m_i32Counter),
			m_pScopeGraph->IsCounterShown(m_i32Counter) ? "Yes" : "No",
			m_pScopeGraph->GetMaximum(m_i32Counter),
			m_pScopeGraph->GetMaximumOfData(m_i32Counter),
			m_pScopeGraph->IsCounterPercentage(m_i32Counter) ? "%%" : "");
	}
	else
	{
		sprintf(Description, "No counters present");
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("PVRScopeExample", Description, ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	// Update counters and draw the graph
	m_pScopeGraph->Ping();

	return true;
}
Example no. 13
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2ShadowMapping::RenderScene()
{
	//rotate light position
	m_fLightAngle += 0.01f;
	m_vLightPosition.x = m_fLightDistance * (float) cos(m_fLightAngle);
	m_vLightPosition.z = m_fLightDistance * (float) sin(m_fLightAngle);
	m_vLightDirection.x = -m_vLightPosition.x;
	m_vLightDirection.z = -m_vLightPosition.z;

	SetUpMatrices();

	glEnable(GL_DEPTH_TEST);

	// Bind the frame buffer object
	glBindFramebuffer(GL_FRAMEBUFFER, m_uiFrameBufferObject);

	if(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE)
	{
		// Clear the screen and depth buffer so we can render from the light's view
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		// Set the current viewport to our texture size
		glViewport(0, 0, m_ui32ShadowMapSize, m_ui32ShadowMapSize);

		// Since we don't care about colour when rendering the depth values to
		// the shadow-map texture, we disable color writing to increase speed.
		glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE); 

		// Enable the simple shader for the light view pass. This render will not be shown to the user 
		// so only the simplest render needs to be implemented
		glUseProgram(m_SimpleShaderProgram.uiId);

		// Set the light projection matrix
		glUniformMatrix4fv(m_SimpleShaderProgram.uiProjectionMatrixLoc, 1, GL_FALSE, m_LightProjection.f);

		// Render the world according to the light's view
		DrawScene(m_LightView);

		// We can turn color writing back on since we already stored the depth values
		glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE); 

		// Restore our normal viewport size to our screen width and height
		glViewport(0, 0,PVRShellGet(prefWidth),PVRShellGet(prefHeight));
	}

	glBindFramebuffer(GL_FRAMEBUFFER, 0);

	// Clear the colour and depth buffers, we are now going to render the scene again from scratch
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	
	// Load the shadow shader. This shader requires additional parameters: texProjMatrix for the depth-buffer
	// look-up and the light direction for diffuse lighting (the effect is a lot nicer with the addition of
	// diffuse light).
	glUseProgram(m_ShadowShaderProgram.uiId);
	
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiShadowMapTexture);

	glUniformMatrix4fv(m_ShadowShaderProgram.uiProjectionMatrixLoc, 1, GL_FALSE, m_Projection.f);

	PVRTMat4 mViewInv, mTextureMatrix, mMatrix;
	mViewInv = m_View.inverse();

	// We need to calculate the texture projection matrix. This matrix takes positions from the camera's view space
	// into the previously rendered light projection space, where we can look up values from our saved depth buffer.
	// It is constructed from the light view and projection matrices used for the previous render, multiplied by the
	// inverse of the current view matrix, with the bias matrix remapping [-1, 1] to the [0, 1] texture range.
	mTextureMatrix = m_BiasMatrix * m_LightProjection *  m_LightView * mViewInv;

	glUniformMatrix4fv(m_ShadowShaderProgram.uiTexProjMatrixLoc, 1, GL_FALSE, mTextureMatrix.f);

	DrawSceneWithShadow(m_View);

	// Re-enable the simple shader to draw the light source object
	glUseProgram(m_SimpleShaderProgram.uiId);

	SPODNode& Node = m_Scene.pNode[1];

	PVRTMat4 mWorld, mModelView;

	m_Scene.GetWorldMatrix(mWorld, Node);

	mWorld.f[12] = m_vLightPosition.x;
	mWorld.f[13] = m_vLightPosition.y;
	mWorld.f[14] = m_vLightPosition.z;

	mModelView = m_View * mWorld;

	glUniformMatrix4fv(m_SimpleShaderProgram.uiModelViewMatrixLoc, 1, GL_FALSE, mModelView.f);
	glUniformMatrix4fv(m_SimpleShaderProgram.uiProjectionMatrixLoc, 1, GL_FALSE, m_LightProjection.f);

	DrawMesh(1);

	m_Print3D.DisplayDefaultTitle("ShadowMap", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
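m_BiasMatrix, used above when building the texture projection matrix, is initialised elsewhere in the sample. The conventional shadow-map bias matrix simply remaps clip-space coordinates in [-1, 1] to the [0, 1] texture range. A sketch of such a matrix, assuming the column-major float layout implied by the mWorld.f[12..14] translation writes above (illustrative, not the sample's actual initialisation):

// Typical shadow-map bias matrix: scale by 0.5, then translate by 0.5.
// Laid out as OpenGL-style columns, so f[12..14] hold the translation.
const float afBias[] = {
	0.5f, 0.0f, 0.0f, 0.0f,   // column 0
	0.0f, 0.5f, 0.0f, 0.0f,   // column 1
	0.0f, 0.0f, 0.5f, 0.0f,   // column 2
	0.5f, 0.5f, 0.5f, 1.0f }; // column 3
PVRTMat4 mBias(afBias);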
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLESPVRScopeRemote::RenderScene()
{
	CPPLProcessingScoped PPLProcessingScoped(m_psSPSCommsData,
		__FUNCTION__, static_cast<unsigned int>(strlen(__FUNCTION__)), m_i32FrameCounter);

	if(m_psSPSCommsData)
	{
		// mark every N frames
		if(!(m_i32FrameCounter % 100))
		{
			char buf[128];
			const int nLen = sprintf(buf, "frame %d", m_i32FrameCounter);
			m_bCommsError |= !pplSendMark(m_psSPSCommsData, buf, nLen);
		}

		// Check for dirty items
		m_bCommsError |= !pplSendProcessingBegin(m_psSPSCommsData, "dirty", static_cast<unsigned int>(strlen("dirty")), m_i32FrameCounter);
		{
			unsigned int nItem, nNewDataLen;
			const char *pData;
			while(pplLibraryDirtyGetFirst(m_psSPSCommsData, &nItem, &nNewDataLen, &pData))
			{
				PVRShellOutputDebug("dirty item %u %u 0x%08x\n", nItem, nNewDataLen, pData);
				switch(nItem)
				{
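				// For this sample the editable items are two floats:
				// 0 = minimum thickness, 1 = maximum variation (see the cases below).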
				case 0:
					if(nNewDataLen == sizeof(SSPSCommsLibraryTypeFloat))
					{
						const SSPSCommsLibraryTypeFloat * const psData = (SSPSCommsLibraryTypeFloat*)pData;
						m_fMinThickness = psData->fCurrent;
					}
					break;
				case 1:
					if(nNewDataLen == sizeof(SSPSCommsLibraryTypeFloat))
					{
						const SSPSCommsLibraryTypeFloat * const psData = (SSPSCommsLibraryTypeFloat*)pData;
						m_fMaxVariation = psData->fCurrent;
					}
					break;
				}
			}
		}
		m_bCommsError |= !pplSendProcessingEnd(m_psSPSCommsData);
	}

	if (m_psSPSCommsData)
	{
		m_bCommsError |= !pplSendProcessingBegin(m_psSPSCommsData, "draw", static_cast<unsigned int>(strlen("draw")), m_i32FrameCounter);
	}

	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Loads the projection matrix
	glMatrixMode(GL_PROJECTION);
	glLoadMatrixf(m_mProjection.f);

	// Specify the modelview matrix
	PVRTMat4 mModel;
	SPODNode& Node = m_Scene.pNode[0];

	m_Scene.GetWorldMatrix(mModel, Node);

	// Rotate and Translate the model matrix
	m_fAngleY += (2*PVRT_PIf/60)/7;

	// Set model view projection matrix
	PVRTMat4 mModelView;
	mModelView = m_mView * PVRTMat4::RotationY(m_fAngleY) * mModel;

	glMatrixMode(GL_MODELVIEW);
	glLoadMatrixf(mModelView.f);

	/*
		Load the light direction from the scene if we have one
	*/

	// Enables lighting. See BasicTnL for a detailed explanation
	glEnable(GL_LIGHTING);
	glEnable(GL_LIGHT0);

	// Set light direction
	PVRTVec4 vLightDirModel;
	vLightDirModel = mModel.inverse() * PVRTVec4(1, 1, 1, 0);
	glLightfv(GL_LIGHT0, GL_POSITION, (float*)&vLightDirModel.x);

	// Enable the vertex position attribute array
	glEnableClientState(GL_VERTEX_ARRAY);

	// bind the texture
	glBindTexture(GL_TEXTURE_2D, m_uiTexture);

	/*
		Now that the model-view matrix is set and the materials are ready,
		call another function to actually draw the mesh.
	*/
	DrawMesh(Node.nIdx);

	// Disable the vertex positions
	glDisableClientState(GL_VERTEX_ARRAY);

	if (m_psSPSCommsData)
	{
		m_bCommsError |= !pplSendProcessingEnd(m_psSPSCommsData);
		m_bCommsError |= !pplSendProcessingBegin(m_psSPSCommsData, "Print3D", static_cast<unsigned int>(strlen("Print3D")), m_i32FrameCounter);
	}

	// Displays the demo name using the tools. For a detailed explanation, see the example IntroducingPVRTools
	if(m_bCommsError)
	{
		m_Print3D.DisplayDefaultTitle("PVRScopeRemote", "Remote APIs\n\nError:\n  PVRScopeComms failed\n  Is PVRPerfServer connected?", ePVRTPrint3DSDKLogo);
		m_bCommsError = false;
	}
	else
		m_Print3D.DisplayDefaultTitle("PVRScopeRemote", "Remote APIs", ePVRTPrint3DSDKLogo);

	m_Print3D.Flush();

	if (m_psSPSCommsData)
	{
		m_bCommsError |= !pplSendProcessingEnd(m_psSPSCommsData);
	}

	// send counters
	m_anCounterReadings[eCounter]	= m_i32FrameCounter;
	m_anCounterReadings[eCounter10]	= m_i32Frame10Counter;
	if(m_psSPSCommsData)
	{
		m_bCommsError |= !pplCountersUpdate(m_psSPSCommsData, m_anCounterReadings);
	}

	// update some counters
	++m_i32FrameCounter;
	if(0 == (m_i32FrameCounter / 10) % 10)
	{
		m_i32Frame10Counter += 10;
	}

	return true;
}
Example no. 15
/*!****************************************************************************
 @Function		RenderSceneWithEffect
 @Return		bool		true if no error occurred
 @Description	Renders the whole scene with a single effect.
******************************************************************************/
bool OGLES3ShadowMapping::RenderSceneWithEffect(const int uiEffectId, const PVRTMat4 &mProjection, const PVRTMat4 &mView)
{
	CPVRTPFXEffect *pEffect = m_ppPFXEffects[uiEffectId];

	// Activate the passed effect
	pEffect->Activate();
	
	for (unsigned int i=0; i < m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];
		SPODMaterial *pMaterial = 0;

		if (pNode->nIdxMaterial != -1)
		{
			pMaterial = &m_Scene.pMaterial[pNode->nIdxMaterial];	

			// Bind the texture if there is one bound to this object
			if (pMaterial->nIdxTexDiffuse != -1)
			{	
				CPVRTString texname = CPVRTString(m_Scene.pTexture[pMaterial->nIdxTexDiffuse].pszName).substitute(".png", "");
				CPVRTStringHash hashedName(texname);
				if (m_TextureCache.Exists(hashedName))
					glBindTexture(GL_TEXTURE_2D, m_TextureCache[hashedName]);
			}
		}
		
		glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[i]);
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[i]);

		// Pre-calculate commonly used matrices
		PVRTMat4 mWorld;
		m_Scene.GetWorldMatrix(mWorld, *pNode);
		PVRTMat4 mWorldView = mView * mWorld;

		// Bind semantics
		const CPVRTArray<SPVRTPFXUniform>& Uniforms = pEffect->GetUniformArray();
		for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsNORMAL:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsUV:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsMATERIALCOLORDIFFUSE:
				{										
					if (pMaterial)
						glUniform4f(Uniforms[j].nLocation, pMaterial->pfMatDiffuse[0], pMaterial->pfMatDiffuse[1], pMaterial->pfMatDiffuse[2], 1.0f);
				}
				break;			
			case ePVRTPFX_UsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWorldViewProj = mProjection * mWorldView;					
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldViewProj.f);
				}
				break;
			case ePVRTPFX_UsWORLDI:
				{
					PVRTMat3 mWorldI3x3(mWorld.inverse());
					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldI3x3.f);
				}
				break;
			case ePVRTPFX_UsWORLDVIEWIT:
				{
					PVRTMat3 mWorldViewIT3x3(mWorldView.inverse().transpose());
					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldViewIT3x3.f);
				}
				break;
			case ePVRTPFX_UsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
				}		
				break;			
			case ePVRTPFX_UsLIGHTPOSWORLD:
				{					
					glUniform3fv(Uniforms[j].nLocation, 1, m_vLightPosition.ptr());
				}
				break;			
			case eCUSTOMSEMANTIC_SHADOWTRANSMATRIX:
				{					
					// We need to calculate the texture projection matrix. This matrix takes positions into the
					// previously rendered light projection space, where we can look up values from our saved depth
					// buffer. It is built from the bias, light projection and light view matrices; here it is
					// combined with the node's world matrix so it can be applied to model-space positions
					// (the commented-out variant below instead starts from the camera's view space).
					//PVRTMat4 mTextureMatrix = m_mBiasMatrix * m_mLightProjection *  m_mLightView * mView.inverse();
					PVRTMat4 mTextureMatrix = m_mBiasMatrix * m_mLightProjection *  m_mLightView * mWorld;
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mTextureMatrix.f);
				}
				break;
			case ePVRTPFX_UsRANDOM:
				{					
					glUniform1f(Uniforms[j].nLocation, m_fBias);
				}
				break;			
			default:
				{
					PVRShellOutputDebug("Error: Unhandled semantic in RenderSceneWithEffect()\n");
					return false;
				}
			}
		}

		//	Now that all uniforms are set and the materials ready, draw the mesh.		
		glDrawElements(GL_TRIANGLES, pMesh->nNumFaces*3, GL_UNSIGNED_SHORT, 0);

		// Disable all vertex attributes
		for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
			case ePVRTPFX_UsNORMAL:
			case ePVRTPFX_UsUV:
				glDisableVertexAttribArray(Uniforms[j].nLocation);
				break;
			}
		}
	}

	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

	return true;
}
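RenderSceneWithEffect() lets the same scene traversal serve both passes of the shadow-mapping technique: once from the light's point of view to fill the depth texture, and once from the camera with the shadow-receiving effect. A hypothetical pair of calls (the effect indices and camera matrix names are placeholders; m_mLightProjection and m_mLightView are the members used in the shadow-transform case above, and the real sample binds its own FBO and viewport around the first pass):

// Illustrative two-pass usage; indices and camera matrices are placeholders.
RenderSceneWithEffect(0 /* depth-only effect */, m_mLightProjection, m_mLightView);
RenderSceneWithEffect(1 /* shadowed, lit effect */, mCameraProjection, mCameraView);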
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2ChameleonMan::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_SkinnedShaderProgram.uiId);

	if(PVRShellIsKeyPressed(PVRShellKeyNameACTION1))
	{
		m_bEnableDOT3 = !m_bEnableDOT3;
		glUniform1i(m_SkinnedShaderProgram.auiLoc[ebUseDot3], m_bEnableDOT3);
	}

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	unsigned long iTime = PVRShellGetTime();

	if(iTime > m_iTimePrev)
	{
		float fDelta = (float) (iTime - m_iTimePrev);
		m_fFrame += fDelta * g_fDemoFrameRate;

		// Increment the counters to make sure our animation works
		m_fLightPos	+= fDelta * 0.0034f;
		m_fWallPos	+= fDelta * 0.00027f;
		m_fBackgroundPos += fDelta * -0.000027f;

		// Wrap the Animation back to the Start
		if(m_fLightPos >= PVRT_TWO_PI)
			m_fLightPos -= PVRT_TWO_PI;

		if(m_fWallPos >= PVRT_TWO_PI)
			m_fWallPos -= PVRT_TWO_PI;

		if(m_fBackgroundPos <= 0)
			m_fBackgroundPos += 1.0f;

		if(m_fFrame > m_Scene.nNumFrame - 1)
			m_fFrame = 0;
	}

	m_iTimePrev	= iTime;

	// Set the scene animation to the current frame
	m_Scene.SetFrame(m_fFrame);

	// Set up camera
	PVRTVec3	vFrom, vTo, vUp(0.0f, 1.0f, 0.0f);
	PVRTMat4 mView, mProjection;
	PVRTVec3	LightPos;
	float fFOV;
	int i;

	bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);

	// Get the camera position, target and field of view (fov)
	if(m_Scene.pCamera[0].nIdxTarget != -1) // Does the camera have a target?
		fFOV = m_Scene.GetCameraPos( vFrom, vTo, 0); // vTo is taken from the target node
	else
		fFOV = m_Scene.GetCamera( vFrom, vTo, vUp, 0); // vTo is calculated from the rotation

	fFOV *= bRotate ? (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight) : (float)PVRShellGet(prefHeight)/(float)PVRShellGet(prefWidth);

	/*
		We can build the model view matrix from the camera position, target and an up vector.
		For this we use PVRTMat4::LookAtRH().
	*/
	mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

	// Calculate the projection matrix
	mProjection = PVRTMat4::PerspectiveFovRH(fFOV,  (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);

	// Update Light Position and related VGP Program constant
	LightPos.x = 200.0f;
	LightPos.y = 350.0f;
	LightPos.z = 200.0f * PVRTABS(sin((PVRT_PI / 4.0f) + m_fLightPos));

	glUniform3fv(m_SkinnedShaderProgram.auiLoc[eLightPos], 1, LightPos.ptr());

	// Set up the View * Projection Matrix
	PVRTMat4 mViewProjection;

	mViewProjection = mProjection * mView;
	glUniformMatrix4fv(m_SkinnedShaderProgram.auiLoc[eViewProj], 1, GL_FALSE, mViewProjection.ptr());

	// Enable the vertex attribute arrays
	for(i = 0; i < eNumAttribs; ++i) glEnableVertexAttribArray(i);

	// Draw skinned meshes
	for(unsigned int i32NodeIndex = 0; i32NodeIndex < 3; ++i32NodeIndex)
	{
		// Bind correct texture
		switch(i32NodeIndex)
		{
			case eBody:
				glActiveTexture(GL_TEXTURE1);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexHeadNormalMap);
				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexHeadBody);
				break;
			case eLegs:
				glActiveTexture(GL_TEXTURE1);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexLegsNormalMap);
				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexLegs);
				break;
			default:
				glActiveTexture(GL_TEXTURE1);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexBeltNormalMap);
				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexBelt);
				break;
		}

		DrawSkinnedMesh(i32NodeIndex);
	}

	// Safely disable the vertex attribute arrays
	for(i = 0; i < eNumAttribs; ++i) glDisableVertexAttribArray(i);

	// Draw non-skinned meshes
	glUseProgram(m_DefaultShaderProgram.uiId);

	// Enable the vertex attribute arrays
	for(i = 0; i < eNumDefaultAttribs; ++i) glEnableVertexAttribArray(i);

	for(unsigned int i32NodeIndex = 3; i32NodeIndex < m_Scene.nNumMeshNode; ++i32NodeIndex)
	{
		SPODNode& Node = m_Scene.pNode[i32NodeIndex];
		SPODMesh& Mesh = m_Scene.pMesh[Node.nIdx];

		// bind the VBO for the mesh
		glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[Node.nIdx]);

		// bind the index buffer, won't hurt if the handle is 0
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[Node.nIdx]);

		// Get the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(Node);

		// Setup the appropriate texture and transformation (if needed)
		switch(i32NodeIndex)
		{
			case eWall:
				glBindTexture(GL_TEXTURE_2D, m_ui32TexWall);

				// Rotate the wall mesh which is circular
				mWorld *= PVRTMat4::RotationY(m_fWallPos);

				glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], 0);

				break;
			case eBackground:
				glBindTexture(GL_TEXTURE_2D, m_ui32TexSkyLine);

				glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], m_fBackgroundPos);
				break;
			case eLights:
				{
					glBindTexture(GL_TEXTURE_2D, m_ui32TexLamp);

					PVRTMat4 mWallWorld = m_Scene.GetWorldMatrix(m_Scene.pNode[eWall]);
					mWorld = mWallWorld * PVRTMat4::RotationY(m_fWallPos) * mWallWorld.inverse() * mWorld;

					glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], 0);
				}
				break;
			default:
				break;
		}

		// Set up shader uniforms
		PVRTMat4 mModelViewProj;
		mModelViewProj = mViewProjection * mWorld;
		glUniformMatrix4fv(m_DefaultShaderProgram.auiLoc[eDefaultMVPMatrix], 1, GL_FALSE, mModelViewProj.ptr());

		// Set the vertex attribute offsets
		glVertexAttribPointer(DEFAULT_VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sVertex.nStride,  Mesh.sVertex.pData);
		glVertexAttribPointer(DEFAULT_TEXCOORD_ARRAY, 2, GL_FLOAT, GL_FALSE, Mesh.psUVW[0].nStride, Mesh.psUVW[0].pData);

		// Indexed Triangle list
		glDrawElements(GL_TRIANGLES, Mesh.nNumFaces*3, GL_UNSIGNED_SHORT, 0);
	}

	// Safely disable the vertex attribute arrays used by the default shader
	for(i = 0; i < eNumDefaultAttribs; ++i) glDisableVertexAttribArray(i);

	// unbind the VBOs
	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

	// Display the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	const char * pDescription;

	if(m_bEnableDOT3)
		pDescription = "Skinning with DOT3 Per Pixel Lighting";
	else
		pDescription = "Skinning with Vertex Lighting";

	m_Print3D.DisplayDefaultTitle("Chameleon Man", pDescription, ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
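
The animation above advances several phase counters by the frame delta and wraps each one back into range by hand. A minimal sketch of the same idea for the forward-running counters (illustrative only, assuming the PVRT_TWO_PI constant from the PVRTools maths headers used above):

#include <math.h>

// Illustrative helper, not demo code: advance a cyclic phase by
// fDeltaMs * fRate and wrap it back into [0, 2*pi).
static float AdvancePhase(float fPhase, float fDeltaMs, float fRate)
{
	fPhase += fDeltaMs * fRate;
	return fmodf(fPhase, PVRT_TWO_PI);
}
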
Example no. 17
0
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
 ******************************************************************************/
bool OGLES2MaximumIntensityBlend::RenderScene()
{
	// Clears the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	
	// Enable blending
	glEnable(GL_BLEND);
	glBlendEquation(GL_MAX_EXT);
	
	/*
	 Calculates the frame number to animate in a time-based manner.
	 Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	 */
	unsigned long ulTime = PVRShellGetTime();
	unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
	m_ulTimePrev	= ulTime;
	m_fFrame	+= (float)ulDeltaTime * DEMO_FRAME_RATE;
	
	if (m_fFrame > m_Scene.nNumFrame-1)
		m_fFrame = 0;	
	
	// Sets the scene animation to this frame
	m_Scene.SetFrame(m_fFrame);
	PVRTVec3 vLightDir;
	
	{
		PVRTVec3	vFrom, vTo, vUp;
		VERTTYPE	fFOV;
		vUp.x = 0.0f;
		vUp.y = 1.0f;
		vUp.z = 0.0f;
		
		// We can get the camera position, target and field of view (fov) with GetCameraPos()
		fFOV = m_Scene.GetCameraPos(vFrom, vTo, 0) * 0.6f;
		
		/*
		 We can build the world view matrix from the camera position, target and an up vector.
		 For this we use PVRTMat4LookAtRH().
		 */
		m_mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);
		
		vLightDir = vFrom;
		
		// Calculates the projection matrix
		bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
		m_mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), CAM_NEAR, CAM_FAR, PVRTMat4::OGL, bRotate);
	}
	
	/*
	 A scene is composed of nodes. There are 3 types of nodes:
	 - MeshNodes :
	 references a mesh in the pMesh[].
	 These nodes are at the beginning of the pNode[] array.
	 And there are nNumMeshNode number of them.
	 This way the .pod format can instantiate several times the same mesh
	 with different attributes.
	 - lights
	 - cameras
	 To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
	 */
	
	for (int i=0; i<(int)m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];
		
		// Gets pMesh referenced by the pNode
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];
		
		glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);
		
		// Gets the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(*pNode);
		
		PVRTMat4 mWorldView;
		mWorldView = m_mView * mWorld;
		
		// Retrieve the list of required uniforms
		CPVRTPFXEffect* pEffect;
		
		SPODMaterial* pMat = &m_Scene.pMaterial[pNode->nIdxMaterial];
		if(pMat->nIdxTexDiffuse != -1)
		{
			pEffect = m_pEffectTextured;
		}
		else
		{
			pEffect = m_pEffect;
		}
		
		pEffect->Activate();
		const CPVRTArray<SPVRTPFXUniform>& aUniforms = pEffect->GetUniformArray();
		
		/*
		 Now we loop over the uniforms requested by the PFX file.
		 Using the switch statement allows us to handle all of the required semantics
		 */
		for(unsigned int j = 0; j < aUniforms.GetSize(); ++j)
		{
			switch(aUniforms[j].nSemantic)
			{
				case ePVRTPFX_UsPOSITION:
				{
					glVertexAttribPointer(aUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(aUniforms[j].nLocation);
				}
					break;
				case ePVRTPFX_UsNORMAL:
				{
					glVertexAttribPointer(aUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(aUniforms[j].nLocation);
				}
					break;
				case ePVRTPFX_UsUV:
				{
					glVertexAttribPointer(aUniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(aUniforms[j].nLocation);
				}
					break;
				case ePVRTPFX_UsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWVP;
					
					// Passes the world-view-projection matrix (WVP) to the shader to transform the vertices
					mWVP = m_mProjection * mWorldView;
					glUniformMatrix4fv(aUniforms[j].nLocation, 1, GL_FALSE, mWVP.f);
				}
					break;
				case eUsINTENSITY:
				{
					int iMat           = pNode->nIdxMaterial;
					SPODMaterial* pMat = &m_Scene.pMaterial[iMat];
					float fIntensity   = pMat->pfMatDiffuse[0];			// Take R value for intensity
					
					glUniform1f(aUniforms[j].nLocation, fIntensity);
				}
					break;
				case ePVRTPFX_UsTEXTURE:
				{
					glUniform1i(aUniforms[j].nLocation, 0);
				}
					break;
				case ePVRTPFX_UsWORLDVIEWIT:
				{
					PVRTMat3 mWorldViewIT3x3(mWorldView.inverse().transpose());
					glUniformMatrix3fv(aUniforms[j].nLocation, 1, GL_FALSE, mWorldViewIT3x3.f);
				}
					break;
				case ePVRTPFX_UsLIGHTDIREYE:
				{
					PVRTVec4 vLightDirView = (m_mView * PVRTVec4(-vLightDir, 1.0f)).normalize();
					glUniform3fv(aUniforms[j].nLocation, 1, vLightDirView.ptr());
				}
					break;
			}
		}
		
		/*
		 Now that the model-view matrix is set and the materials ready,
		 call another function to actually draw the mesh.
		 */
		DrawMesh(pMesh);
		glBindBuffer(GL_ARRAY_BUFFER, 0);
		
		/*
		 Now disable all of the enabled attribute arrays that the PFX requested.
		 */
		for(unsigned int j = 0; j < aUniforms.GetSize(); ++j)
		{
			switch(aUniforms[j].nSemantic)
			{
				case ePVRTPFX_UsNORMAL:
				case ePVRTPFX_UsUV:
				case ePVRTPFX_UsPOSITION:
				{
					glDisableVertexAttribArray(aUniforms[j].nLocation);
				}
					break;
			}
		}
	}
	
	// Reset the blend state
	glBlendEquation(GL_FUNC_ADD);
	glDisable(GL_BLEND);
	
	// Determine which title to show. The default title is quite long, so we display a shortened version if
	// it cannot fit on the screen.
	const char* pszTitle = NULL;
	{
		bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
		
		float fW, fH;
		m_Print3D.MeasureText(&fW, &fH, 1.0f, c_pszTitle);
		
		int iScreenW = bRotate ? PVRShellGet(prefHeight) : PVRShellGet(prefWidth);
		if((int)fW >= iScreenW)
		{
			pszTitle = c_pszTitleShort;
		}
		else
		{
			pszTitle = c_pszTitle;
		}
	}
	
	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle(pszTitle, "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();
	
	return true;
}
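
On OpenGL ES 2.0 the GL_MAX blend equation used above comes from the GL_EXT_blend_minmax extension, so it is worth confirming the extension is exported before relying on it. A minimal, illustrative check (the helper name is not from the demo):

#include <GLES2/gl2.h>
#include <string.h>

// Illustrative helper: true if the driver exports GL_EXT_blend_minmax,
// i.e. glBlendEquation(GL_MAX_EXT) is legal on this context.
static bool IsMaxBlendingSupported()
{
	const char* pszExt = (const char*)glGetString(GL_EXTENSIONS);
	return pszExt != 0 && strstr(pszExt, "GL_EXT_blend_minmax") != 0;
}
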
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2PVRScopeExample::RenderScene()
{
	// Keyboard input (cursor up/down to cycle through counters)
	if(PVRShellIsKeyPressed(PVRShellKeyNameUP))
	{
		m_i32Counter++;

		if(m_i32Counter > (int) m_pScopeGraph->GetCounterNum())
			m_i32Counter = m_pScopeGraph->GetCounterNum();
	}

	if(PVRShellIsKeyPressed(PVRShellKeyNameDOWN))
	{
		m_i32Counter--;

		if(m_i32Counter < 0)
			m_i32Counter = 0;
	}

	if(PVRShellIsKeyPressed(PVRShellKeyNameACTION2))
		m_pScopeGraph->ShowCounter(m_i32Counter, !m_pScopeGraph->IsCounterShown(m_i32Counter));

	// Keyboard input (cursor left/right to change active group)
	if(PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
	{
		m_pScopeGraph->SetActiveGroup(m_pScopeGraph->GetActiveGroup()+1);
	}

	if(PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
	{
		m_pScopeGraph->SetActiveGroup(m_pScopeGraph->GetActiveGroup()-1);
	}

	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind texture
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiTexture);

	// Rotate and Translation the model matrix
	PVRTMat4 mModel;
	mModel = PVRTMat4::RotationY(m_fAngleY);
	m_fAngleY += (2*PVRT_PI/60)/7;

	// Set model view projection matrix
	PVRTMat4 mModelView, mMVP;
	mModelView = m_mView * mModel;
	mMVP =  m_mProjection * mModelView;
	glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr());

	// Set light direction in model space
	PVRTVec4 vLightDirModel;
	vLightDirModel = mModel.inverse() * PVRTVec4(1, 1, 1, 0);

	glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);

	// Set eye position in model space
	PVRTVec4 vEyePosModel;
	vEyePosModel = mModelView.inverse() * PVRTVec4(0, 0, 0, 1);
	glUniform3fv(m_ShaderProgram.uiEyePosLoc, 1, &vEyePosModel.x);

	/*
		Set the iridescent shading parameters
	*/
	// Set the minimum thickness of the coating in nm
	glUniform1f(m_ShaderProgram.uiMinThicknessLoc, m_fMinThickness);

	// Set the maximum variation in thickness of the coating in nm
	glUniform1f(m_ShaderProgram.uiMaxVariationLoc, m_fMaxVariation);

	/*
		Now that the uniforms are set, call another function to actually draw the mesh.
	*/
	DrawMesh(0);

	char Description[256];

	if(m_pScopeGraph->GetCounterNum())
	{
		sprintf(Description, "Active Grp %i\n\nCounter %i (Grp %i) \nName: %s\nShown: %s\nuser y-axis: %.2f  max: %.2f%s",
			m_pScopeGraph->GetActiveGroup(), m_i32Counter,
			m_pScopeGraph->GetCounterGroup(m_i32Counter),
			m_pScopeGraph->GetCounterName(m_i32Counter),
			m_pScopeGraph->IsCounterShown(m_i32Counter) ? "Yes" : "No",
			m_pScopeGraph->GetMaximum(m_i32Counter),
			m_pScopeGraph->GetMaximumOfData(m_i32Counter),
			m_pScopeGraph->IsCounterPercentage(m_i32Counter) ? "%%" : "");
	}
	else
	{
		sprintf(Description, "No counters present");
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("PVRScopeExample", Description, ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	// Update counters and draw the graph
	m_pScopeGraph->Ping();

	return true;
}
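
The counter description above is assembled with sprintf into a fixed 256-byte buffer; a very long counter name could overflow it. A defensive variant, shown only as a sketch with hypothetical names, formats through snprintf instead:

#include <stdio.h>

// Illustrative helper: format a counter label into a caller-supplied buffer,
// truncating rather than overflowing if the counter name is very long.
static void FormatCounterLabel(char* pszOut, size_t uiSize, int i32Counter, const char* pszName)
{
	snprintf(pszOut, uiSize, "Counter %i Name: %s", i32Counter, pszName ? pszName : "<none>");
}
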
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2LightMap::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind textures
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiBaseTex);
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, m_uiReflectTex);
	glActiveTexture(GL_TEXTURE2);
	glBindTexture(GL_TEXTURE_2D, m_uiShadowTex);

	// draw two models, mask and plane
	for (int i = 0; i < eNumModels; ++i)
	{
		// rotate and translate the model matrix
		PVRTMat4 mModel;

		if (i == eMask)
		{
			PVRTMat4 mRotX, mRotY;
			mRotX = PVRTMat4::RotationX(m_fAngleX);
			m_fAngleX += PVRT_PI / 300;
			mRotY = PVRTMat4::RotationY(m_fAngleY);
			m_fAngleY += PVRT_PI / 250;

			mModel = mRotY * mRotX;
		}
		else
		{
			mModel = PVRTMat4::Translation(0.0, 0.0, -25);
		}

		// Set model view projection matrix
		PVRTMat4 mModelView, mMVP;
		mModelView = m_mView * mModel;
		mMVP = m_mProjection * mModelView;
		glUniformMatrix4fv(m_ShaderProgram.auiLoc[eMVPMatrix], 1, GL_FALSE, mMVP.ptr());

		// Set shadow projection matrix
		PVRTMat4 mShadowProj;
		mShadowProj = m_mShadowViewProj * mModel;
		glUniformMatrix4fv(m_ShaderProgram.auiLoc[eShadowProj], 1, GL_FALSE, mShadowProj.ptr());

		// Set model world matrix
		PVRTMat3 fModelWorld = PVRTMat3(mModel);

		glUniformMatrix3fv(m_ShaderProgram.auiLoc[eModelWorld], 1, GL_FALSE, fModelWorld.ptr());

		// Set light position in model space
		PVRTVec4 vLightDirModel;
		vLightDirModel =  mModel.inverse() *  PVRTVec4( 1, 1, 1, 0 );

		glUniform3fv(m_ShaderProgram.auiLoc[eLightDirModel], 1, &vLightDirModel.x);

		// Set eye position in model space
		PVRTVec4 vEyePosModel;
		vEyePosModel = mModelView.inverse() * PVRTVec4(0, 0, 0, 1);

		glUniform3fv(m_ShaderProgram.auiLoc[eEyePosModel], 1, &vEyePosModel.x);

		m_Models[i].DrawMesh(0);
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("LightMap", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
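
The shadow projection above multiplies m_mShadowViewProj by each model matrix, but the construction of m_mShadowViewProj is not part of this excerpt. The usual construction, stated here as an assumption rather than as the demo's actual code, prepends a bias matrix that remaps clip-space [-1, 1] to texture-space [0, 1]:

// Illustrative sketch: build a light view-projection matrix with a bias so the
// result can be used directly as projective texture coordinates.
static PVRTMat4 BuildShadowViewProj(const PVRTMat4& mLightProj, const PVRTMat4& mLightView)
{
	// Scale by 0.5 and offset by 0.5: clip space [-1, 1] -> texture space [0, 1].
	PVRTMat4 mBias = PVRTMat4::Translation(0.5f, 0.5f, 0.5f) * PVRTMat4::Scale(0.5f, 0.5f, 0.5f);
	return mBias * mLightProj * mLightView;
}
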
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3Skinning::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);
	glActiveTexture(GL_TEXTURE0);

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	unsigned long iTime = PVRShellGetTime();

	if(iTime > m_iTimePrev)
	{
		float fDelta = (float) (iTime - m_iTimePrev);
		m_fFrame += fDelta * g_fDemoFrameRate;

		// Modify the transformation matrix if it is needed
		bool bRebuildTransformation = false;

		if(PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
		{
			m_fAngle -= 0.03f;

			if(m_fAngle < 0.0f)
				m_fAngle += PVRT_TWO_PIf;

			bRebuildTransformation = true;
		}

		if(PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
		{
			m_fAngle += 0.03f;

			if(m_fAngle > PVRT_TWO_PIf)
				m_fAngle -= PVRT_TWO_PIf;

			bRebuildTransformation = true;
		}

		if(PVRShellIsKeyPressed(PVRShellKeyNameUP))
		{
			m_fDistance -= 10.0f;

			if(m_fDistance < -500.0f)
				m_fDistance = -500.0f;

			bRebuildTransformation = true;
		}

		if(PVRShellIsKeyPressed(PVRShellKeyNameDOWN))
		{
			m_fDistance += 10.0f;

			if(m_fDistance > 200.0f)
				m_fDistance = 200.0f;

			bRebuildTransformation = true;
		}

		if(bRebuildTransformation)
			m_Transform = PVRTMat4::Translation(0,0, m_fDistance) * PVRTMat4::RotationY(m_fAngle);

	}

	m_iTimePrev	= iTime;

	if(m_fFrame > m_Scene.nNumFrame - 1)
		m_fFrame = 0;

	// Set the scene animation to the current frame
	m_Scene.SetFrame(m_fFrame);

	/*
		Set up camera
	*/
	PVRTVec3	vFrom, vTo, vUp(0, 1, 0);
	PVRTMat4 mView, mProjection;
	float fFOV;

	// We can get the camera position, target and field of view (fov) with GetCameraPos()
	fFOV = m_Scene.GetCamera(vFrom, vTo, vUp, 0);

	/*
		We can build the model view matrix from the camera position, target and an up vector.
		For this we use PVRTMat4::LookAtRH().
	*/
	mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

	// Calculate the projection matrix
	bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
	mProjection = PVRTMat4::PerspectiveFovRH(fFOV,  (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);

	// Read the light direction from the scene
	PVRTVec4 vLightDirWorld = m_Scene.GetLightDirection(0);
	glUniform3fv(m_ShaderProgram.auiLoc[eLightDirWorld], 1, &vLightDirWorld.x);

	// Set up the View * Projection Matrix
	PVRTMat4 mViewProjection;

	mViewProjection = mProjection * mView;
	glUniformMatrix4fv(m_ShaderProgram.auiLoc[eViewProj], 1, GL_FALSE, mViewProjection.ptr());

	/*
		A scene is composed of nodes. There are 3 types of nodes:
		- MeshNodes :
			references a mesh in the pMesh[].
			These nodes are at the beginning of the pNode[] array.
			And there are nNumMeshNode number of them.
			This way the .pod format can instantiate several times the same mesh
			with different attributes.
		- lights
		- cameras
		To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
	*/
	for (unsigned int i32NodeIndex = 0; i32NodeIndex < m_Scene.nNumMeshNode; ++i32NodeIndex)
	{
		SPODNode& Node = m_Scene.pNode[i32NodeIndex];

		// Get the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(Node);

		// Set up shader uniforms
		PVRTMat4 mModelViewProj;
		mModelViewProj = mViewProjection * mWorld;
		glUniformMatrix4fv(m_ShaderProgram.auiLoc[eMVPMatrix], 1, GL_FALSE, mModelViewProj.ptr());

		PVRTVec4 vLightDirModel;
		vLightDirModel = mWorld.inverse() * vLightDirWorld;
		glUniform3fv(m_ShaderProgram.auiLoc[eLightDirModel], 1, &vLightDirModel.x);

		// Loads the correct texture using our texture lookup table
		if(Node.nIdxMaterial == -1)
			glBindTexture(GL_TEXTURE_2D, 0); // It has no pMaterial defined. Use blank texture (0)
		else
			glBindTexture(GL_TEXTURE_2D, m_puiTextures[Node.nIdxMaterial]);

		DrawMesh(i32NodeIndex);
	}

	// Display the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("Skinning", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
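
Like several of the other examples, this loop moves the world-space light direction into each node's model space with the inverse world matrix. A small illustrative helper (using only the PVRTVec/PVRTMat operations already shown above) captures the pattern:

// Illustrative helper: bring a world-space direction (w == 0) into a node's
// model space and renormalise it, as done per node in the loop above.
static PVRTVec3 DirectionToModelSpace(const PVRTMat4& mWorld, const PVRTVec4& vDirWorld)
{
	PVRTVec4 v = mWorld.inverse() * vDirWorld;
	PVRTVec3 vDir(v.x, v.y, v.z);
	vDir.normalize();
	return vDir;
}
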
Example no. 21
0
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3DisplacementMap::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);


	//	Calculate the time since the last frame
	unsigned long ulTime = PVRShellGetTime();
	unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
	m_ulTimePrev = ulTime;

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Enable 2D texturing for the first texture.
	glActiveTexture(GL_TEXTURE0);

	// Set the sampler2D variable to the first texture unit
	glUniform1i(m_ShaderProgram.uiTexture, 0);

	// Enable 2D texturing for the second texture.
	glActiveTexture(GL_TEXTURE1);

	// Set the displacement map variable to the second texture unit
	glUniform1i(m_ShaderProgram.uiDisMap, 1);

	// Calculate and set the displacement factor
	if(m_bGrow)
	{
		m_DisplacementFactor += (float)ulDeltaTime * g_fDemoFrameRate;

		if(m_DisplacementFactor > 25.0f)
		{
			m_bGrow = false;
			m_DisplacementFactor = 25.0f;
		}
	}
	else
	{
		m_DisplacementFactor -= (float)ulDeltaTime * g_fDemoFrameRate;

		if(m_DisplacementFactor < 0.0f)
		{
			m_bGrow = true;
			m_DisplacementFactor = 0.0f;
		}
	}

	glUniform1f(m_ShaderProgram.uiDisplacementFactor, m_DisplacementFactor);

	// Bind the displacement map texture
	glBindTexture(GL_TEXTURE_2D, m_uiDisMapID);

	// Now the displacement map texture is bound set the active texture to texture 0
	glActiveTexture(GL_TEXTURE0);

	// Draw the scene

	// Enable the vertex attribute arrays
	glEnableVertexAttribArray(VERTEX_ARRAY);
	glEnableVertexAttribArray(NORMAL_ARRAY);
	glEnableVertexAttribArray(TEXCOORD_ARRAY);

	for(unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
	{
		SPODNode& Node = m_Scene.pNode[i];

		// Get the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(Node);

		// Pass the model-view-projection matrix (MVP) to the shader to transform the vertices
		PVRTMat4 mModelView, mMVP;
		mModelView = m_View * mWorld;
		mMVP = m_Projection * mModelView;
		glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);

		// Pass the light direction in model space to the shader
		PVRTVec4 vLightDir;
		vLightDir = mWorld.inverse() * m_LightDir;

		PVRTVec3 vLightDirModel = *(PVRTVec3*) vLightDir.ptr();
		vLightDirModel.normalize();

		glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);

		// Load the correct texture for the mesh using our texture lookup table
		GLuint uiTex = 0;

		if(Node.nIdxMaterial != -1)
			uiTex = m_puiTextureIDs[Node.nIdxMaterial];

		glBindTexture(GL_TEXTURE_2D, uiTex);

		/*
			Now that the model-view matrix is set and the materials ready,
			call another function to actually draw the mesh.
		*/
		DrawMesh(i);
	}

	// Safely disable the vertex attribute arrays
	glDisableVertexAttribArray(VERTEX_ARRAY);
	glDisableVertexAttribArray(NORMAL_ARRAY);
	glDisableVertexAttribArray(TEXCOORD_ARRAY);

	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

	// Display the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("DisplacementMapping", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
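
The grow/shrink logic above is effectively a triangle wave over the displacement factor. For comparison, a stateless sketch (illustrative only, assuming <math.h>) computes the same shape directly from elapsed time:

#include <math.h>

// Illustrative: triangle wave rising from 0 to fMax and back over fPeriodMs,
// equivalent in shape to the grow/shrink state machine above.
static float TriangleWave(unsigned long ulTimeMs, float fPeriodMs, float fMax)
{
	float fPhase = fmodf((float)ulTimeMs, fPeriodMs) / fPeriodMs;            // 0..1
	float fTri   = (fPhase < 0.5f) ? fPhase * 2.0f : (1.0f - fPhase) * 2.0f; // 0..1..0
	return fTri * fMax;
}
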
Example no. 22
0
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3IntroducingPOD::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	unsigned long ulTime = PVRShellGetTime();

	if(m_ulTimePrev > ulTime)
		m_ulTimePrev = ulTime;

	unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
	m_ulTimePrev	= ulTime;
	m_fFrame += (float)ulDeltaTime * g_fDemoFrameRate;
	if (m_fFrame > m_Scene.nNumFrame - 1) m_fFrame = 0;

	// Sets the scene animation to this frame
	m_Scene.SetFrame(m_fFrame);

	/*
		Get the direction of the first light from the scene.
	*/
	PVRTVec4 vLightDirection;
	vLightDirection = m_Scene.GetLightDirection(0);
	// For direction vectors, w should be 0
	vLightDirection.w = 0.0f;

	/*
		Set up the view and projection matrices from the camera
	*/
	PVRTMat4 mView, mProjection;
	PVRTVec3	vFrom, vTo(0.0f), vUp(0.0f, 1.0f, 0.0f);
	float fFOV;

	// Setup the camera

	// Camera nodes are after the mesh and light nodes in the array
	int i32CamID = m_Scene.pNode[m_Scene.nNumMeshNode + m_Scene.nNumLight + g_ui32Camera].nIdx;

	// Get the camera position, target and field of view (fov)
	if(m_Scene.pCamera[i32CamID].nIdxTarget != -1) // Does the camera have a target?
		fFOV = m_Scene.GetCameraPos( vFrom, vTo, g_ui32Camera); // vTo is taken from the target node
	else
		fFOV = m_Scene.GetCamera( vFrom, vTo, vUp, g_ui32Camera); // vTo is calculated from the rotation

	// We can build the model view matrix from the camera position, target and an up vector.
	// For this we use PVRTMat4::LookAtRH()
	mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

	// Calculate the projection matrix
	bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
	mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);

	/*
		A scene is composed of nodes. There are 3 types of nodes:
		- MeshNodes :
			references a mesh in the pMesh[].
			These nodes are at the beginning of the pNode[] array.
			And there are nNumMeshNode number of them.
			This way the .pod format can instantiate several times the same mesh
			with different attributes.
		- lights
		- cameras
		To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
	*/
	for (unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
	{
		SPODNode& Node = m_Scene.pNode[i];

		// Get the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(Node);

		// Pass the model-view-projection matrix (MVP) to the shader to transform the vertices
		PVRTMat4 mModelView, mMVP;
		mModelView = mView * mWorld;
		mMVP = mProjection * mModelView;
		glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);

		// Pass the light direction in model space to the shader
		PVRTVec4 vLightDir;
		vLightDir = mWorld.inverse() * vLightDirection;

		PVRTVec3 vLightDirModel = *(PVRTVec3*)&vLightDir;
		vLightDirModel.normalize();

		glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);

		// Load the correct texture using our texture lookup table
		GLuint uiTex = 0;

		if(Node.nIdxMaterial != -1)
			uiTex = m_puiTextureIDs[Node.nIdxMaterial];

		glBindTexture(GL_TEXTURE_2D, uiTex);

		/*
			Now that the model-view matrix is set and the materials are ready,
			call another function to actually draw the mesh.
		*/
		DrawMesh(i);
	}

	// Display the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("IntroducingPOD", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
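
The comment above notes that w should be 0 for direction vectors. The reason is that with w == 0 the translation part of a 4x4 transform has no effect, so only rotation and scale are applied when the direction is moved between spaces. A short illustrative sketch (the function and vectors are hypothetical, not demo code):

// Illustrative sketch: a direction (w == 0) ignores the matrix translation,
// while a position (w == 1) picks it up. This is why the demo clears w before
// transforming the light direction into model space.
static void DirectionVersusPosition(const PVRTMat4& mWorld)
{
	PVRTVec4 vDirWorld(0.0f, -1.0f, 0.0f, 0.0f);        // direction: w = 0
	PVRTVec4 vPosWorld(0.0f, 10.0f, 0.0f, 1.0f);        // position:  w = 1
	PVRTVec4 vDirModel = mWorld.inverse() * vDirWorld;  // rotation/scale only
	PVRTVec4 vPosModel = mWorld.inverse() * vPosWorld;  // rotation/scale + translation
	(void)vDirModel; (void)vPosModel;                   // silence unused warnings
}
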
Example no. 23
0
// ---------------------------------------------------------------
void MyPVRDemo::RenderCurch(const PVRTMat4& mxCam)
	{
	PVRTMat4 mxModel = PVRTMat4::Identity();
	PVRTMat4 mxModelView = mxCam * mxModel;
	PVRTMat4 mxTexProj = m_mxLightBias * m_mxLightProj * m_mxLightView * mxCam.inverse();

	// --- Draw the church reflected in the floor first, so we don't have to swap between GPU programs
	glUseProgram(m_ChurchReflShader.uiID);
	// Base map
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_tex[enumTEXTURE_ChurchWalls]);
	// Light map
	glActiveTexture(GL_TEXTURE2);
	glBindTexture(GL_TEXTURE_2D, m_tex[enumTEXTURE_ChurchLightmap]);

	glCullFace(GL_FRONT);
	PVRTMat4 mxReflChurchView = mxCam * PVRTMat4::Scale(1, -1, 1);
	glUniformMatrix4fv(m_ChurchReflShader.uiProjection, 1, GL_FALSE, m_mxProjection.ptr());
	glUniformMatrix4fv(m_ChurchReflShader.uiModelView, 1, GL_FALSE, mxReflChurchView.ptr());	// Reflected ModelView matrix
	DrawMesh(enumMODEL_Church, FLAG_VRT | FLAG_TEX0 | FLAG_TEX1);
	glCullFace(GL_BACK);

	// --- Activate the Church shader which utilises the Shadow Map.
	glUseProgram(m_ChurchShader.uiID);
	
	
	// --- Use the Shadow Map texture in texture unit 1
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, m_uiShadowMapTex);
	
	// --- Upload projection matrices
	glUniformMatrix4fv(m_ChurchShader.uiProjection, 1, GL_FALSE, m_mxProjection.ptr());	
	glUniformMatrix4fv(m_ChurchShader.uiTexProjection, 1, GL_FALSE, mxTexProj.ptr());
	glUniform1f(m_ChurchShader.uiAlpha, 1.0f);	// Set no alpha while we draw the walls.

	// --- Draw church walls	 (Textures are already bound)
	// Draw the walls as normal
	glUniformMatrix4fv(m_ChurchShader.uiModelView, 1, GL_FALSE, mxModelView.ptr());		// Standard ModelView matrix
	DrawMesh(enumMODEL_Church, FLAG_VRT | FLAG_TEX0 | FLAG_TEX1);

	// --- Draw floor
	glEnable(GL_BLEND);
	// Base map
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_tex[enumTEXTURE_Floor]);
	// Light map
	glActiveTexture(GL_TEXTURE2);
	glBindTexture(GL_TEXTURE_2D, m_tex[enumTEXTURE_FloorLightmap]);
	
	// Draw the floor
	glUniform1f(m_ChurchShader.uiAlpha, FLOOR_ALPHA);
	glUniformMatrix4fv(m_ChurchShader.uiModelView, 1, GL_FALSE, mxModelView.ptr());		// Standard ModelView matrix
	DrawMesh(enumMODEL_Floor, FLAG_VRT | FLAG_TEX0 | FLAG_TEX1);

	glBindTexture(GL_TEXTURE_2D, 0);
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, 0);
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, 0);
	glDisable(GL_BLEND);
	}
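
The reflected pass above mirrors the view with PVRTMat4::Scale(1, -1, 1) and temporarily culls front faces, because mirroring flips the triangle winding. The general pattern, sketched with a hypothetical draw callback rather than the demo's own DrawMesh call:

#include <GLES2/gl2.h>

// Illustrative sketch: draw geometry mirrored about the y = 0 plane.
// The scale flips the winding order, so culling is flipped for the mirrored pass.
static void DrawMirroredAboutFloor(const PVRTMat4& mxView, void (*pfnDraw)(const PVRTMat4& mxModelView))
{
	PVRTMat4 mxMirroredView = mxView * PVRTMat4::Scale(1.0f, -1.0f, 1.0f);
	glCullFace(GL_FRONT);       // front/back swap under the mirror transform
	pfnDraw(mxMirroredView);
	glCullFace(GL_BACK);        // restore the default culling mode
}
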
Example no. 24
0
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3Skybox2::RenderScene()
{
	unsigned int i, j;

	// Clears the colour and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/

	unsigned long iTime = PVRShellGetTime();

	if(!bPause)
	{
		// Calculate the model view matrix turning around the balloon
		ComputeViewMatrix();

		if(iTime > m_iTimePrev)
		{
			float fDelta = (float) (iTime - m_iTimePrev) * g_fFrameRate;
			m_fFrame   += fDelta;
			fDemoFrame += fDelta;
			fBurnAnim  += fDelta * 0.02f;

			if(fBurnAnim >= 1.0f)
				fBurnAnim = 1.0f;
		}
	}

	m_iTimePrev	= iTime;

	/* KeyBoard input processing */

	if(PVRShellIsKeyPressed(PVRShellKeyNameACTION1))
		bPause=!bPause;

	if(PVRShellIsKeyPressed(PVRShellKeyNameACTION2))
		fBurnAnim = 0.0f;

	/* Keyboard Animation and Automatic Shader Change over time */
	if(!bPause && (fDemoFrame > 500 || (m_i32Effect == 2 && fDemoFrame > 80)))
	{
		if(++m_i32Effect >= (int) g_ui32NoOfEffects)
		{
			m_i32Effect = 1;
			m_fFrame = 0.0f;
		}

		fDemoFrame = 0.0f;
		fBurnAnim  = 0.0f;
	}

	/* Change Shader Effect */

	if(PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
	{
		if(++m_i32Effect >= (int) g_ui32NoOfEffects)
			m_i32Effect = 1;

		fDemoFrame = 0.0f;
		fBurnAnim  = 0.0f;
		m_fFrame = 0.0f;
	}
	if(PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
	{
		if(--m_i32Effect < 1)
			m_i32Effect = g_ui32NoOfEffects - 1;

		fDemoFrame = 0.0f;
		fBurnAnim  = 0.0f;
		m_fFrame = 0.0f;
	}

	/* Change Skybox Texture */
	if(PVRShellIsKeyPressed(PVRShellKeyNameUP))
	{
		for(i = 0; i < g_ui32NoOfEffects; ++i)
			ChangeSkyboxTo(m_ppEffects[i], m_ui32TextureIDs[4]);

		fBurnAnim = 0.0f;
	}

	if(PVRShellIsKeyPressed(PVRShellKeyNameDOWN))
	{
		for(i = 0; i < g_ui32NoOfEffects; ++i)
			ChangeSkyboxTo(m_ppEffects[i], m_ui32TextureIDs[3]);

		fBurnAnim = 0.0f;
	}

	/* Setup Shader and Shader Constants */
	int location;

	glDisable(GL_CULL_FACE);

	DrawSkybox();

	glEnable(GL_CULL_FACE);

	m_ppEffects[m_i32Effect]->Activate();

	for(i = 0; i < m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];

		// Gets pMesh referenced by the pNode
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];

		// Gets the node model matrix
		PVRTMat4 mWorld, mWORLDVIEW;
		mWorld = m_Scene.GetWorldMatrix(*pNode);

		mWORLDVIEW = m_mView * mWorld;

		glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);

		const CPVRTArray<SPVRTPFXUniform>& Uniforms = m_ppEffects[m_i32Effect]->GetUniformArray();
		for(j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
				case ePVRTPFX_UsPOSITION:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
				case ePVRTPFX_UsNORMAL:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
				case ePVRTPFX_UsUV:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
				case ePVRTPFX_UsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mMVP;

					/* Passes the model-view-projection matrix (MVP) to the shader to transform the vertices */
					mMVP = m_mProjection * mWORLDVIEW;
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mMVP.f);
				}
				break;
				case ePVRTPFX_UsWORLDVIEW:
				{
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mWORLDVIEW.f);
				}
				break;
				case ePVRTPFX_UsWORLDVIEWIT:
				{
					PVRTMat4 mWORLDVIEWI, mWORLDVIEWIT;

					mWORLDVIEWI = mWORLDVIEW.inverse();
					mWORLDVIEWIT= mWORLDVIEWI.transpose();

					PVRTMat3 WORLDVIEWIT = PVRTMat3(mWORLDVIEWIT);

					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, WORLDVIEWIT.f);
				}
				break;
				case ePVRTPFX_UsVIEWIT:
				{
					PVRTMat4 mViewI, mViewIT;

					mViewI  = m_mView.inverse();
					mViewIT = mViewI.transpose();

					PVRTMat3 ViewIT = PVRTMat3(mViewIT);

					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, ViewIT.f);
				}
				break;
				case ePVRTPFX_UsLIGHTDIREYE:
				{
					PVRTVec4 vLightDirectionEyeSpace;

					// Passes the light direction in eye space to the shader
					vLightDirectionEyeSpace = m_mView * PVRTVec4(1.0,1.0,-1.0,0.0);
					glUniform3f(Uniforms[j].nLocation, vLightDirectionEyeSpace.x, vLightDirectionEyeSpace.y, vLightDirectionEyeSpace.z);
				}
				break;
				case ePVRTPFX_UsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
				}
				break;
			}
		}

		location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "myEyePos");

		if(location != -1)
			glUniform3f(location, vCameraPosition.x, vCameraPosition.y, vCameraPosition.z);

		// Set the burn animation factor
		location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "fAnim");

		if(location != -1)
			glUniform1f(location, fBurnAnim);

		location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "myFrame");

		if(location != -1)
			glUniform1f(location, m_fFrame);

		if(g_bBlendShader[m_i32Effect])
		{
			glEnable(GL_BLEND);

			// Correct render order for alpha blending through culling
			// Draw Back faces
			glCullFace(GL_FRONT);

			location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "bBackFace");

			glUniform1i(location, 1);

			DrawMesh(pMesh);

			glUniform1i(location, 0);

			glCullFace(GL_BACK);
		}
		else
		{
			location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "bBackFace");

			if(location != -1)
				glUniform1i(location, 0);

			glDisable(GL_BLEND);
		}

		/* Everything should now be set up, so draw the mesh */
		DrawMesh(pMesh);

		glBindBuffer(GL_ARRAY_BUFFER, 0);

		for(j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
				{
					glDisableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsNORMAL:
				{
					glDisableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsUV:
				{
					glDisableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			}
		}
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	if(!bPause)
		m_Print3D.DisplayDefaultTitle("Skybox2", "", ePVRTPrint3DSDKLogo);
	else
		m_Print3D.DisplayDefaultTitle("Skybox2", "Paused", ePVRTPrint3DSDKLogo);

	m_Print3D.Flush();

	return true;
}
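
The loop above calls glGetUniformLocation for "myEyePos", "fAnim" and "myFrame" for every node on every frame. Uniform locations are fixed once a program is linked, so they can be resolved once per effect; a minimal sketch with an illustrative struct and function name (not part of the demo):

#include <GLES2/gl2.h>

// Illustrative cache, not demo code: resolve the custom uniform locations once
// after linking, instead of querying them per node per frame.
struct SCustomUniformLocations
{
	GLint iEyePos;
	GLint iAnim;
	GLint iFrame;
};

static SCustomUniformLocations CacheCustomUniforms(GLuint uiProgram)
{
	SCustomUniformLocations sLocs;
	sLocs.iEyePos = glGetUniformLocation(uiProgram, "myEyePos");
	sLocs.iAnim   = glGetUniformLocation(uiProgram, "fAnim");
	sLocs.iFrame  = glGetUniformLocation(uiProgram, "myFrame");
	return sLocs;	// a name the program does not use simply resolves to -1
}
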
Example no. 25
0
void OGLES2FilmTV::DrawPODScene(PVRTMat4 &mViewProjection, bool bDrawCamera)
{
	// Clear the colour and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Get the position of the first light from the scene.
	PVRTVec4 vLightPosition = m_Scene.GetLightPosition(0);

	for(unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
	{
		SPODNode& Node = m_Scene.pNode[i];

		// Get the node model matrix
		PVRTMat4 mWorld = m_Scene.GetWorldMatrix(Node);

		if(i == g_ui32CameraMesh)
		{
			if(!bDrawCamera)
				continue;

			// Rotate camera model
			mWorld =  m_MiniCamView.inverse() * mWorld;
		}
		else if(i == g_ui32TvScreen) // If we're drawing the TV screen change to the black and white shader
		{
			glUseProgram(m_BWShaderProgram.uiId);
		}

		// Pass the model-view-projection matrix (MVP) to the shader to transform the vertices
		PVRTMat4 mModelView, mMVP;
		mMVP = mViewProjection * mWorld;
		glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);

		// Pass the light position in model space to the shader
		PVRTVec4 vLightPos;
		vLightPos = mWorld.inverse() * vLightPosition;

		glUniform3fv(m_ShaderProgram.uiLightPosLoc, 1, &vLightPos.x);

		// Load the correct texture using our texture lookup table
		GLuint uiTex = 0;

		if(Node.nIdxMaterial != -1)
		{
			if(m_bFBOsCreated && Node.nIdxMaterial == m_uiTVScreen && m_i32Frame != 0)
				uiTex = m_uiTexture[1 - m_i32CurrentFBO];
			else
				uiTex = m_puiTextureIDs[Node.nIdxMaterial];
		}

		glBindTexture(GL_TEXTURE_2D, uiTex);

		/*
			Now that the model-view matrix is set and the materials ready,
			call another function to actually draw the mesh.
		*/
		DrawMesh(Node.nIdx);

		if(i == g_ui32TvScreen)
		{
			// Change back to the normal shader after drawing the g_ui32TvScreen
			glUseProgram(m_ShaderProgram.uiId);
		}
	}
}
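
The TV screen binds m_uiTexture[1 - m_i32CurrentFBO], that is, the texture the other FBO rendered into on the previous frame. The double-buffering pattern, sketched with illustrative names rather than the demo's members:

#include <GLES2/gl2.h>

// Illustrative ping-pong step: this frame renders into one texture while the
// other (filled last frame) is sampled, then the roles swap for the next frame.
static void PingPongSelect(int& i32Current, const GLuint auiTexture[2],
                           GLuint& uiRenderTarget, GLuint& uiSampleSource)
{
	uiRenderTarget = auiTexture[i32Current];
	uiSampleSource = auiTexture[1 - i32Current];
	i32Current     = 1 - i32Current;	// swap for the following frame
}
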
Example no. 26
0
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2Bloom::RenderScene()
{
	HandleInput();

	// Calculate the mask and light rotation based on the passed time
	static unsigned long ulPreviousTime = PVRShellGetTime();
	unsigned long ulNowTime = PVRShellGetTime();
	m_fRotation += PVRT_PI * (ulNowTime - ulPreviousTime) * 0.0002f;
	ulPreviousTime = ulNowTime;
	if (m_fRotation > (PVRT_PI * 2.0f))
		m_fRotation -= PVRT_PI * 2.0f;
	
	// Calculate the model, view and projection matrix
	float fModelAngleY = m_fRotation;
	float fLightAngleY = -m_fRotation;

	PVRTMat4 mWorld = PVRTMat4::RotationY(fModelAngleY);
	PVRTMat4 mLight = PVRTMat4::RotationY(fLightAngleY);
	PVRTMat4 mView = PVRTMat4::LookAtRH(PVRTVec3(0, 0, 150), PVRTVec3(0, 0, 0), PVRTVec3(0, 1, 0));

	bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
	PVRTMat4 mProjection = PVRTMat4::PerspectiveFovRH(g_fCameraFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);
	PVRTMat4 mMVP = mProjection * mView * mWorld;

	// Simple rotating directional light in model-space
	PVRTVec4 vMsLightPos = mWorld.inverse() * mLight * PVRTVec4(0.5f, -1, -0.5f, 0).normalize();

	glBindFramebuffer(GL_FRAMEBUFFER, m_i32OriginalFbo);
	glClearColor(0.075f, 0.1f, 0.125f, 0.0f);
	glViewport(0, 0, PVRShellGet(prefWidth), PVRShellGet(prefHeight));
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use simple shader program to render the mask
	glUseProgram(m_ShaderProgram.uiId);
	glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);
	glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vMsLightPos.x);

	// Draw the mesh
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiBaseTex);
	DrawMesh(0);

	if (m_bApplyBloom)
	{
		// First render the objects which shall have the bloom effect to a texture
		glBindFramebuffer(GL_FRAMEBUFFER, m_uiBlurFramebufferObjects[0]);
		glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
		glViewport(0, 0, m_i32TexSize, m_i32TexSize);
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		glUseProgram(m_PreBloomShaderProgram.uiId);
		glUniformMatrix4fv(m_PreBloomShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);
		glUniform3fv(m_PreBloomShaderProgram.uiLightDirLoc, 1, &vMsLightPos.x);
		glUniform1f(m_PreBloomShaderProgram.uiBloomIntensity, m_fBloomIntensity);

		// Draw the mesh
		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, m_uiBloomMappingTexture);
		DrawMesh(0);

		if(m_bDiscard) // Was GL_EXT_discard_framebuffer supported?
		{
			//Give the drivers a hint that we don't want depth information to be stored for later.
			const GLenum attachment = GL_DEPTH_ATTACHMENT;
			m_Extensions.glDiscardFramebufferEXT(GL_FRAMEBUFFER, 1, &attachment);
		}
		/*
		  Blur the generated image n-times
		*/
		for (unsigned int i=0; i < m_ui32BlurPasses; i++)
		{
			/*
			 Apply horizontal blur
			*/
			glBindFramebuffer(GL_FRAMEBUFFER, m_uiBlurFramebufferObjects[1]);
			glViewport(0, 0, m_i32TexSize, m_i32TexSize);
			glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

			glActiveTexture(GL_TEXTURE0);
			glBindTexture(GL_TEXTURE_2D, m_uiBlurTextures[0]);

			// Use the shader program for the scene
			glUseProgram(m_BlurShaderProgram.uiId);
			glUniform1f(m_BlurShaderProgram.uiTexelOffsetX, m_fTexelOffset);
			glUniform1f(m_BlurShaderProgram.uiTexelOffsetY, 0.0f);

			DrawAxisAlignedQuad(PVRTVec2(-1, -1), PVRTVec2(1, 1));


			// Nothing to invalidate here: only the colour attachment was written and its contents are still needed.
			/*
			   Apply vertical blur
			*/
			glBindFramebuffer(GL_FRAMEBUFFER, m_uiBlurFramebufferObjects[0]);
			glViewport(0, 0, m_i32TexSize, m_i32TexSize);
			glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

			glActiveTexture(GL_TEXTURE0);
			glBindTexture(GL_TEXTURE_2D, m_uiBlurTextures[1]);

			// Use the shader program for the scene
			glUseProgram(m_BlurShaderProgram.uiId);
			glUniform1f(m_BlurShaderProgram.uiTexelOffsetX, 0.0f);
			glUniform1f(m_BlurShaderProgram.uiTexelOffsetY, m_fTexelOffset);

			DrawAxisAlignedQuad(PVRTVec2(-1, -1), PVRTVec2(1, 1));

			if(m_bDiscard) // Was GL_EXT_discard_framebuffer supported?
			{
				//Give the drivers a hint that we don't want depth information to be stored for later.
				const GLenum attachment = GL_DEPTH_ATTACHMENT;
				m_Extensions.glDiscardFramebufferEXT(GL_FRAMEBUFFER, 1, &attachment);
			}
		}

		/*
		  Draw scene with bloom
		*/
		glBindFramebuffer(GL_FRAMEBUFFER, m_i32OriginalFbo);
		glViewport(0, 0, PVRShellGet(prefWidth), PVRShellGet(prefHeight));

		glEnable(GL_BLEND);
		glBlendFunc(GL_ONE, GL_ONE);

		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, m_uiBlurTextures[0]);

		// Use the shader program for the scene
		glUseProgram(m_PostBloomShaderProgram.uiId);

		/*
		    The following section will draw a quad on the screen
			where the post processing pixel shader shall be executed.
			Try to minimize the area by only drawing where the actual
			post processing should happen, as this is a very costly operation.
		*/
		if (PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen))
		{
			DrawAxisAlignedQuad(PVRTVec2(-0.875f, -0.5f), PVRTVec2(0.0625f, 0.25f),
					            PVRTVec2(0.8755f, 0.5f), PVRTVec2(0.9375f, 0.75f));
		}
		else
		{
			DrawAxisAlignedQuad(PVRTVec2(-0.5f, -0.875f), PVRTVec2(0.25f, 0.0625f),
				                PVRTVec2(0.5f, 0.875f), PVRTVec2(0.75f, 0.9375f));
		}

		glDisable(GL_BLEND);
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("Bloom", NULL, ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();


	return true;
}
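
The blur is separable: one pass offsets texture reads horizontally by m_fTexelOffset, the next vertically. The offset itself is not computed in this excerpt; for a square m_i32TexSize target it is typically one texel in normalised coordinates, which is an assumption about the demo shown here only as a sketch:

// Illustrative: one-texel step in normalised texture coordinates for a square
// render target, as consumed by the horizontal and vertical blur passes above.
static float ComputeTexelOffset(int i32TexSize)
{
	return (i32TexSize > 0) ? 1.0f / (float)i32TexSize : 0.0f;
}
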
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2Iridescence::RenderScene()
{
	// Keyboard input (cursor up/down to change thickness variation)
	if (PVRShellIsKeyPressed(PVRShellKeyNameUP))
	{
		m_fMaxVariation += 1.0f;
	}
	else if (PVRShellIsKeyPressed(PVRShellKeyNameDOWN))
	{
		m_fMaxVariation = PVRT_MAX(0.0f, m_fMaxVariation - 1.0f);
	}

	// Keyboard input (cursor left/right to change minimum thickness)
	if (PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
	{
		m_fMinThickness += 1.0f;
	}
	else if (PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
	{
		m_fMinThickness = PVRT_MAX(0.0f, m_fMinThickness - 1.0f);
	}

	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind texture
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiTexture);

	// Rotate and Translation the model matrix
	PVRTMat4 mModel;
	mModel = PVRTMat4::RotationY(m_fAngleY);
	m_fAngleY += (2*PVRT_PI/60)/7;

	// Set model view projection matrix
	PVRTMat4 mModelView, mMVP;
	mModelView = m_mView * mModel;
	mMVP =  m_mProjection * mModelView;
	glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr());

	// Set light direction in model space
	PVRTVec4 vLightDirModel;
	vLightDirModel = mModel.inverse() * PVRTVec4(1, 1, 1, 0);

	glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);

	// Set eye position in model space
	PVRTVec4 vEyePosModel;
	vEyePosModel = mModelView.inverse() * PVRTVec4(0, 0, 0, 1);
	glUniform3fv(m_ShaderProgram.uiEyePosLoc, 1, &vEyePosModel.x);

	/*
		Set the iridescent shading parameters
	*/
	// Set the minimum thickness of the coating in nm
	glUniform1f(m_ShaderProgram.uiMinThicknessLoc, m_fMinThickness);

	// Set the maximum variation in thickness of the coating in nm
	glUniform1f(m_ShaderProgram.uiMaxVariationLoc, m_fMaxVariation);

	/*
		Now that the uniforms are set, call another function to actually draw the mesh.
	*/
	DrawMesh(0);

	m_Print3D.Print3D(2.0f, 10.0f, 0.75f, 0xffffffff, "Minimum Thickness:");
	m_Print3D.Print3D(2.0f, 15.0f, 0.75f, 0xffffffff, "%8.0f nm", m_fMinThickness);
	m_Print3D.Print3D(2.0f, 20.0f, 0.75f, 0xffffffff, "Maximum Variation:");
	m_Print3D.Print3D(2.0f, 25.0f, 0.75f, 0xffffffff, "%8.0f nm", m_fMaxVariation);

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("Iridescence", "", ePVRTPrint3DLogoIMG);
	m_Print3D.Flush();

	return true;
}
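
Both this example and the PVRScope one advance m_fAngleY by a fixed amount per frame, so the spin speed depends on the frame rate. A time-based variant, shown only as an illustrative helper using the PVRT_TWO_PIf constant seen earlier, would scale the step by the elapsed milliseconds instead:

// Illustrative helper: advance a rotation by elapsed time rather than per
// frame, so the apparent speed is independent of the frame rate.
static float AdvanceRotation(float fAngle, unsigned long ulDeltaMs, float fRadiansPerSecond)
{
	fAngle += fRadiansPerSecond * (float)ulDeltaMs * 0.001f;
	if(fAngle > PVRT_TWO_PIf)
		fAngle -= PVRT_TWO_PIf;
	return fAngle;
}
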