Code Example #1
/*!****************************************************************************
 @Function		DrawBall
 @Description	Draws the reflective and refractive ball onto the screen.
******************************************************************************/
void OGLES2Glass::DrawBall() {
	// Set model view projection matrix
	PVRTMat4 mModel, mModelView, mMVP;

	mModel = PVRTMat4::Scale(6.0f, 6.0f, 6.0f);
	mModelView = m_mView * mModel;
	mMVP = m_mProjection * mModelView;

	// Use shader program
	glUseProgram(m_aEffectPrograms[m_iEffect].uiId);

	// Bind textures
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiParaboloidTexture);
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_CUBE_MAP, m_uiCubeTex);

	glUniformMatrix4fv(m_aEffectPrograms[m_iEffect].auiLoc[eMVPMatrix], 1, GL_FALSE, mMVP.ptr());

	// Set model matrix
	PVRTMat3 Model3x3 = PVRTMat3(mModel);
	glUniformMatrix3fv(m_aEffectPrograms[m_iEffect].auiLoc[eMMatrix], 1, GL_FALSE, Model3x3.ptr());

	// Set eye position in model space
	PVRTVec4 vEyePosModel;
	vEyePosModel = mModelView.inverse() * PVRTVec4(0, 0, 0, 1);
	glUniform3fv(m_aEffectPrograms[m_iEffect].auiLoc[eEyePos], 1, &vEyePosModel.x);

	// Now that the uniforms are set, call another function to actually draw the mesh
	DrawMesh(0, &m_Ball, &m_puiVbo, &m_puiIndexVbo, 2);
}
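
The `mModelView.inverse() * PVRTVec4(0, 0, 0, 1)` line above recovers the camera position expressed in the ball's model space: the camera sits at the eye-space origin, and the inverse model-view matrix maps that origin back through the view and model transforms in one step. A minimal sketch of the same idea, assuming the PVRTools math headers (PVRTVector.h / PVRTMatrix.h) are available; the values are illustrative only:

// Sketch: eye-space origin -> (inverse view) -> world space -> (inverse model) -> model space.
PVRTMat4 mModel = PVRTMat4::Scale(6.0f, 6.0f, 6.0f);
PVRTMat4 mView  = PVRTMat4::LookAtRH(PVRTVec3(0.0f, 0.0f, 100.0f), PVRTVec3(0.0f, 0.0f, 0.0f), PVRTVec3(0.0f, 1.0f, 0.0f));
PVRTVec4 vEyePosModel = (mView * mModel).inverse() * PVRTVec4(0, 0, 0, 1);
// vEyePosModel is the camera position in the ball's local coordinates; the shader can use it
// to build per-vertex view vectors for the reflection/refraction lookups.
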
Code Example #2
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2Fog::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Keyboard input (cursor to change fog function)
	if (PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
	{
		m_eFogMode = EFogMode((m_eFogMode + eNumFogModes - 1) % eNumFogModes);
	}
	if (PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
	{
		m_eFogMode = EFogMode((m_eFogMode + 1) % eNumFogModes);
	}

	// Use the loaded shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind texture
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiTexture);

	// Set uniforms
	glUniform1i(m_ShaderProgram.uiFogFuncLoc, m_eFogMode);

	// Rotate and translate the model matrix
	PVRTMat4 mModel = PVRTMat4::RotationY(m_fAngleY);
	m_fAngleY += PVRT_PI / 90;
	mModel.preTranslate(0, 0, 500 * cos(m_fPositionZ) - 450);	
	m_fPositionZ += (2*PVRT_PI)*0.0008f;

	// Feed Projection and Model View matrices to the shaders
	PVRTMat4 mModelView = m_mView * mModel;
	PVRTMat4 mMVP = m_mProjection * mModelView;

	glUniformMatrix4fv(m_ShaderProgram.uiModelViewLoc, 1, GL_FALSE, mModelView.ptr());
	glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr());

	// Pass the light direction transformed with the inverse of the ModelView matrix
	// This saves the transformation of the normals per vertex. A simple dot3 between this direction
	// and the un-transformed normal will allow proper smooth shading.
	PVRTVec3 vMsLightDir = (PVRTMat3(mModel).inverse() * PVRTVec3(1, 1, 1)).normalized();
	glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, vMsLightDir.ptr());

	/*
		Now that the model-view matrix is set and the materials ready,
		call another function to actually draw the mesh.
	*/
	DrawMesh(0);

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("Fog", "", ePVRTPrint3DLogoIMG);
	m_Print3D.Print3D(0.3f, 7.5f, 0.75f, PVRTRGBA(255,255,255,255), "Fog Mode: %s", g_FogFunctionList[m_eFogMode]);
	m_Print3D.Flush();

	return true;
}
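
The cursor keys above cycle `m_eFogMode`, and the fragment shader selects a fog function from the `uiFogFuncLoc` uniform. The shader source is not part of this snippet; the classic fog factor equations it would typically choose between can be sketched as follows (a hedged sketch of the standard formulas, not the demo's actual shader code):

#include <cmath>

// Standard fog factors: 1.0 = no fog, 0.0 = fully fogged, for an eye-space distance d.
float FogLinear(float d, float fStart, float fEnd) { return (fEnd - d) / (fEnd - fStart); }
float FogExp(float d, float fDensity)              { return std::exp(-fDensity * d); }
float FogExp2(float d, float fDensity)             { float x = fDensity * d; return std::exp(-x * x); }

// Usage: clamp the factor to [0, 1] and blend the lit colour towards the fog colour,
// e.g. colour = f * litColour + (1.0f - f) * fogColour;
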
Code Example #3
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2ParallaxBumpMap::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind textures
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiBaseTex);
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, m_uiNormalMap);
	glActiveTexture(GL_TEXTURE2);
	glBindTexture(GL_TEXTURE_2D, m_uiHeightMap);

	// Calculate the model matrix
	PVRTMat4 mModel = PVRTMat4::Scale(g_CubeScale);
	mModel *= PVRTMat4::Translation(g_CubeTranslation);
	mModel *= PVRTMat4::RotationY(m_fAngleY);
	m_fAngleY += PVRT_PI / 450;

	// Set the Model View matrix
	PVRTMat4 mMV = m_mView * mModel;
	glUniformMatrix4fv(m_ShaderProgram.auiLoc[eModelViewMatrix], 1, GL_FALSE, mMV.ptr());

	// Set the ModelViewIT Matrix
	PVRTMat4 mMIT = mMV.transpose();
	mMIT = mMIT.inverseEx();
	PVRTMat3 mMIT3x3 = PVRTMat3(mMIT);
	glUniformMatrix3fv(m_ShaderProgram.auiLoc[eNormal], 1, GL_FALSE, mMIT3x3.ptr());


	// Set model view projection matrix
	PVRTMat4 mMVP = m_mViewProj * mModel;
	glUniformMatrix4fv(m_ShaderProgram.auiLoc[eModelViewProj], 1, GL_FALSE, mMVP.ptr());

	// Set light position in eye space
	PVRTVec4 vEyeSpaceLightPos = m_mView * g_LightPos;
	glUniform3fv(m_ShaderProgram.auiLoc[eLightEyeSpacePos], 1, vEyeSpaceLightPos.ptr());

	DrawMesh(0);

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("Parallax Bumpmap", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
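
This sample builds its normal matrix as `mMV.transpose()` followed by `inverseEx()`, whereas other samples in this collection use `inverse().transpose()`; the two are identical because $(M^{\top})^{-1} = (M^{-1})^{\top}$. Why the inverse transpose is needed at all is a standard derivation, not specific to this demo:

$$ n^{\top} t = 0,\qquad t' = M t,\qquad n' := (M^{-1})^{\top} n \;\Longrightarrow\; n'^{\top} t' = n^{\top} M^{-1} M t = n^{\top} t = 0. $$

For a pure rotation $M^{-1} = M^{\top}$, so the normal matrix reduces to $M$ itself; the inverse transpose only matters once non-uniform scaling enters the model-view matrix.
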
Code Example #4
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3ComplexLighting::RenderScene()
{
	// Clears the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Keyboard input (cursor to change light)
	if (PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
	{
		m_eLightType = ELightType((m_eLightType + eNumLightTypes - 1) % eNumLightTypes);
	}
	if (PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
	{
		m_eLightType = ELightType((m_eLightType + 1) % eNumLightTypes);
	}

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind texture
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiTexture);

	glUniform1i(m_ShaderProgram.uiLightSelLoc, m_eLightType);

	// Rotate the model matrix
	PVRTMat4 mModel = PVRTMat4::RotationY(m_fAngleY);
	m_fAngleY += PVRT_PI / 150;

	// Set model view projection matrix
	PVRTMat4 mModelView = m_mView * mModel;
	PVRTMat4 mMVP = m_mProjection * mModelView;
	glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr());

	// Set model view matrix
	glUniformMatrix4fv(m_ShaderProgram.uiModelViewLoc, 1, GL_FALSE, mModelView.ptr());

	// Set model view inverse transpose matrix
	PVRTMat3 mModelViewIT = PVRTMat3(mModelView).inverse().transpose();
	glUniformMatrix3fv(m_ShaderProgram.uiModelViewITLoc, 1, GL_FALSE, mModelViewIT.ptr());

	DrawMesh(0);

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("ComplexLighting", c_aszLightTypeList[m_eLightType], ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
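
The LEFT/RIGHT handling above steps `m_eLightType` through the enum with wrap-around. Stepping backwards adds `eNumLightTypes - 1` instead of subtracting 1, so the value fed to `%` never goes negative. A tiny stand-alone illustration of the same wrap trick (the enum names here are made up for the example):

enum EMode { eModeA, eModeB, eModeC, eNumModes };

EMode Next(EMode e)     { return EMode((e + 1) % eNumModes); }
EMode Previous(EMode e) { return EMode((e + eNumModes - 1) % eNumModes); }
// Previous(eModeA) == eModeC: adding (eNumModes - 1) avoids the negative remainder
// that (eModeA - 1) % eNumModes would otherwise produce, which would cast to an
// out-of-range enum value.
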
Code Example #5
File: ShaderEnvMap.cpp  Project: dovalec/3D
void ShaderEnvMap::UseProgram()
{
	Shader::UseProgram();
	
	glUniform1i(myCubeReflection, false);
	
	glUniform1i(my2DMap, 0);
	glUniform1i(myCubeMap, 1);

	CameraManager * pCameraManager = CameraManager::GetCameraManager();
	Camera * pCurrentCamera = pCameraManager->GetCurrentCamera();
	RenderLayerManager & renderManager = RenderLayerManager::GetRenderLayerManager();
	Mesh * pCurrentMesh = renderManager.GetCurrentMesh();

	PVRTMat4 viewMtx(pCurrentCamera->GetViewMtx().f);
	
	static float m_fAngleX = 0.0;
	static float m_fAngleY = 0.0;

	PVRTMat4 mModel, mRotX, mRotY;
	mRotX = PVRTMat4::RotationX(m_fAngleX);
	mRotY = PVRTMat4::RotationY(m_fAngleY);
	
	mModel = mRotY * mRotX;

	m_fAngleX += 0.01f;
	//m_fAngleY += 0.011f;

	//PVRTMat4 meshWorld( pCurrentMesh->GetWorldMtx().f );
	PVRTMat4 meshWorld = mModel;
	PVRTMat4 modelView = viewMtx * meshWorld;
	
	// Set model matrix
	PVRTMat3 model3x3 = PVRTMat3(meshWorld);
	glUniformMatrix3fv( myModelWorld, 1, GL_FALSE, model3x3.ptr());

	// Set eye position in model space
	PVRTVec4 eyePosModel;
	eyePosModel = modelView.inverse() * PVRTVec4(0, 0, 0, 1);

	glUniform3fv(myEyePosModel, 1, &eyePosModel.x);


}
Code Example #6
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2FastTnL::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind texture
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiTexture);

	// Rotate the model matrix
	PVRTMat4 mModel = PVRTMat4::RotationY(m_fAngleY);
	m_fAngleY += 0.02f;

	// Calculate model view projection matrix
	PVRTMat4 mMVP = m_mViewProj * mModel;

	// Feed the projection model view matrix to the shaders
	glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr());

	/*
		The inverse of a rotation matrix is its transpose.
		Because v * M = transpose(M) * v, this means:
		v * R == inverse(R) * v
		So we don't have to actually invert or transpose the matrix
		to transform back from world space to model space
	*/
	PVRTVec3 vMsLightDir = (PVRTVec3(1, 1, 1) * PVRTMat3(mModel)).normalized();
	glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, vMsLightDir.ptr());

	/*
		Now that the uniforms are set, call another function to actually draw the mesh.
	*/
	DrawMesh(0);

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("FastTnL", "", ePVRTPrint3DLogoIMG);
	m_Print3D.Flush();

	return true;
}
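
The comment in the snippet above relies on the fact that a pure rotation satisfies R^-1 = R^T, so multiplying a row vector on the left of the matrix (v * R) applies the inverse rotation without computing it. A quick numeric sanity check of that property with the same PVRTools types (illustrative only):

// For a pure rotation R, v * R (row-vector form) equals R.inverse() * v (column-vector form).
PVRTMat3 R = PVRTMat3(PVRTMat4::RotationY(0.7f));
PVRTVec3 v(1.0f, 1.0f, 1.0f);

PVRTVec3 a = v * R;            // what the sample above does to move the light into model space
PVRTVec3 b = R.inverse() * v;  // the explicit inverse: a and b match up to float rounding
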
Code Example #7
File: Mesh3D.cpp  Project: quocble/LimbicGL
void Mesh3D::draw(SceneGraph *scene,  Sprite3D *sprite, int min, int max)
{
	
	scene->m_ppEffect[m_shader]->Activate();
	
	if(m_blendEnable)
	{
		glEnable (GL_BLEND);
		glBlendFunc (m_blend1, m_blend2);
	} else 
	{
		glDisable (GL_BLEND);
	}
	
	//glEnable(GL_SAMPLE_COVERAGE);
	//glSampleCoverage(1.0, GL_FALSE);
	
	for(unsigned int j = 0; j < scene->m_pnUniformCnt[m_shader]; ++j)
	{
		//unsigned int location = scene->m_ppsUniforms[m_shader][j].nLocation;
		EUniformSemantic semantic = (EUniformSemantic)scene->m_ppsUniforms[m_shader][j].nSemantic; 
		switch(semantic)
		{
			case eUsMVPMATRIX:
			{
				PVRTMat4 mMVP;
				
				/* Passes the model-view-projection matrix (MVP) to the shader to transform the vertices */
				
				if(useSceneModel)
				{
					mMVP = scene->m_mProjection * scene->m_mModelView * sprite->modelView;
				}
				else 
				{
					mMVP = scene->m_mProjection * sprite->modelView;
				}
				glUniformMatrix4fv(scene->m_ppsUniforms[m_shader][j].nLocation, 1, GL_FALSE, mMVP.f);
			}
				break;
			case eUsMODELVIEW:
			{
				PVRTMat4 MV = useSceneModel ? scene->m_mModelView * sprite->modelView : sprite->modelView  ;
				glUniformMatrix4fv(scene->m_ppsUniforms[m_shader][j].nLocation, 1, GL_FALSE, MV.f);
			}
				break;
			case eUsMODELVIEWIT:
			{
				PVRTMat4 mModelViewI, mModelViewIT;
				PVRTMat4 MV = useSceneModel ?  scene->m_mModelView * sprite->modelView : sprite->modelView ;
				/* Passes the inverse transpose of the model-view matrix to the shader to transform the normals */
				mModelViewI = MV.inverse();
				mModelViewIT= mModelViewI.transpose();
				PVRTMat3 ModelViewIT = PVRTMat3(mModelViewIT);
				
				glUniformMatrix3fv(scene->m_ppsUniforms[m_shader][j].nLocation, 1, GL_FALSE, ModelViewIT.f);
			}
				break;
			case eUsVIEWIT:
			{
				PVRTMat4 mViewI, mViewIT;
				
				/* Passes the inverse transpose of the model-view matrix to the shader to transform the normals */
				mViewI = scene->m_mView.inverse();
				mViewIT= mViewI.transpose();
				
				PVRTMat3 ViewIT = PVRTMat3(mViewIT);
				
				glUniformMatrix3fv(scene->m_ppsUniforms[m_shader][j].nLocation, 1, GL_FALSE, ViewIT.f);
			}
				break;	
                
            default:
                break;

		}
	}
	
	
	for(int i32MeshIndex  =min; i32MeshIndex < max; i32MeshIndex++)
	{
		//int i32MeshIndex = i;
		//int i32MeshIndex = m_ModelPOD.pNode[i].nIdx;
		//SPODMesh* submesh = &m_ModelPOD.pMesh[i32MeshIndex];
		//int materialIndex = m_ModelPOD.pNode[i].nIdxMaterial;
		//SPODMaterial* pMaterial = &m_ModelPOD.pMaterial[materialIndex];
		
		//glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[i32MeshIndex]);
		int materialIndex = meshInfo[i32MeshIndex].materialIndex;
		
		for(unsigned int j = 0; j < scene->m_pnUniformCnt[m_shader]; ++j)
		{
			unsigned int location = scene->m_ppsUniforms[m_shader][j].nLocation;
			EUniformSemantic semantic = (EUniformSemantic)scene->m_ppsUniforms[m_shader][j].nSemantic; 
			switch(semantic)
			{
				case eUsMATERIALCOLORAMBIENT:
				{
					PVRTVec3 vColour = materialInfo[materialIndex].ambientColor;
					glUniform3f(location, vColour.x, vColour.y, vColour.z);
				}
					break;
				case eUsMATERIALCOLORDIFFUSE:
				{
					PVRTVec3 vColour =  materialInfo[materialIndex].diffuseColor;
					glUniform3f(location, vColour.x, vColour.y, vColour.z);
				}
					break;
					
				case eUsPOSITION:
				{
					glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO[ i32MeshIndex] );
					//glVertexAttribPointer(m_ppsUniforms[m_nCurrentShader][j].nLocation, 3, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
					glVertexAttribPointer(scene->m_ppsUniforms[m_shader][j].nLocation, 3, GL_FLOAT, GL_FALSE, meshInfo[i32MeshIndex].vertexStride, (const void*) NULL);
					
					glEnableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
				}
					break;
				case eUsNORMAL:
				{
					glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO[ i32MeshIndex]);
					glVertexAttribPointer(scene->m_ppsUniforms[m_shader][j].nLocation, 3, GL_FLOAT, GL_FALSE, 
										  meshInfo[i32MeshIndex].normalStride, (const void*)  meshInfo[i32MeshIndex].normalOffset);
					
					glEnableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
				}
					break;
				case eUsTANGENT:
				{
					glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO[ i32MeshIndex]);
					glVertexAttribPointer(scene->m_ppsUniforms[m_shader][j].nLocation, 3, GL_FLOAT, GL_FALSE, 
										  meshInfo[i32MeshIndex].tangentStride, (const void*)  meshInfo[i32MeshIndex].tangentOffset);
					
					glEnableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
					
				}
					break;
				case eUsUV:
				{
					
					//glVertexAttribPointer(m_ppsUniforms[m_nCurrentShader][j].nLocation, 2, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
					if( meshInfo[i32MeshIndex].uvOffset != 0)
					{
						glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO[i32MeshIndex]);
						glVertexAttribPointer(scene->m_ppsUniforms[m_shader][j].nLocation, 2, GL_FLOAT, GL_FALSE, 
											  meshInfo[i32MeshIndex].uvStride, (const void*)  meshInfo[i32MeshIndex].uvOffset);
						glEnableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
					}
				}
					break;

				case eUsTEXTURE:
				{
					// Set the sampler variable to the texture unit					
					int index = scene->m_ppsUniforms[m_shader][j].nIdx;
					
					switch(index)
					{
						case 0:
						{
							GLuint tex = m_uiTexture[materialIndex];
							glActiveTexture(GL_TEXTURE0);
							glBindTexture(GL_TEXTURE_2D, tex);
							
							//NSLog(@"Normal map %d %d", index, tex);
							glUniform1i(scene->m_ppsUniforms[m_shader][j].nLocation, index);
						}
							break;
						case 1:
						{
							//NSLog(@"Normal map %d %d", index, m_normalMap);
							glActiveTexture(GL_TEXTURE1);
							glBindTexture(GL_TEXTURE_2D, m_normalMap);
						
							glUniform1i(scene->m_ppsUniforms[m_shader][j].nLocation, index);
						}
							break;
						case 2:
						{
							glActiveTexture(GL_TEXTURE2);
							glBindTexture(GL_TEXTURE_CUBE_MAP, m_cubeMap);

							glUniform1i(scene->m_ppsUniforms[m_shader][j].nLocation, index);
						}
							break;
					}
					
				}				
					break;
					
				case eUsTEXTURE_ENABLED:
				{
					int texture_enabled = 0;
					int texture = (signed int)m_uiTexture[materialIndex];
					if(texture != INT_MAX)
						texture_enabled = 1;
						
					//printf("Texture %d %d\n", m_uiTexture[materialIndex], texture_enabled);
					if(m_normalMap > 0)
					{
						texture_enabled |= 2;
					}
					if(m_cubeMap > 0)
					{
						texture_enabled |= 4;
					}
					
					glUniform1i(scene->m_ppsUniforms[m_shader][j].nLocation, texture_enabled);
				}
					break;
				case eUsANIMATION:
				{
					// Float in the range 0..1: contains this object's distance through its animation.
					float fAnimation = 0.5f * scene->m_fViewAngle / PVRT_PI;
					glUniform1f(scene->m_ppsUniforms[m_shader][j].nLocation, fAnimation);
				}
					break;
				case eUsMATERIALSHININESS:
				{
					float shiness =  materialInfo[materialIndex].shiness;
					glUniform1f(location, shiness);
				}
					break;
				case eUsMATERIALCOLORSPECULAR:
				{
					PVRTVec3 vColour =  materialInfo[materialIndex].specularColor;
					glUniform3f(location, vColour.x, vColour.y, vColour.z);
				}
					break;
				case eUsLIGHTPOSWORLD:
				{
					PVRTVec3 position(45, 72, 52);
					glUniform3f(location, position.x , position.y, position.z);
				}
					break;
                default:
                    break;
			}
		}
		// Load the correct texture using our texture lookup table
		
		
		//glBindBuffer(GL_ARRAY_BUFFER, 0);	// Unbind the last buffer used.
		drawMesh(i32MeshIndex, materialIndex);
	}
	
	/*
	 Disable attributes
	 */
	for(unsigned int j = 0; j < scene->m_pnUniformCnt[m_shader]; ++j)
	{
		switch(scene->m_ppsUniforms[m_shader][j].nSemantic)
		{
			case eUsPOSITION:
			{
				glDisableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
			}
				break;
			case eUsNORMAL:
			{
				glDisableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
			}
				break;
			case eUsUV:
			{
				glDisableVertexAttribArray(scene->m_ppsUniforms[m_shader][j].nLocation);
			}
				break;
		
		}
	}

	glDisable(GL_BLEND);

}
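
The `eUsTEXTURE_ENABLED` case above packs three flags into one integer uniform: bit 0 when the material's base texture is valid, bit 1 when a normal map is bound, bit 2 when a cube map is bound. The receiving shader is not part of this snippet, so the decode below is only an inference from how the bits are set (the struct and function names are illustrative):

// Sketch: decoding the texture_enabled bitmask that Mesh3D::draw uploads.
struct TextureFlags { bool base, normal, cube; };

TextureFlags DecodeTextureFlags(int texture_enabled)
{
	TextureFlags flags;
	flags.base   = (texture_enabled & 1) != 0;	// set when the material texture handle is valid
	flags.normal = (texture_enabled & 2) != 0;	// set when m_normalMap > 0
	flags.cube   = (texture_enabled & 4) != 0;	// set when m_cubeMap > 0
	return flags;
}
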
Code Example #8
	void UniformHandler::CalculateMeshUniform(const Uniform& sUniform, SPODMesh *pMesh, SPODNode *pNode)
	{
		switch(sUniform.getSemantic())
		{
		case eUsPosition:
			{
				glVertexAttribPointer(sUniform.getLocation(), 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
				glEnableVertexAttribArray(sUniform.getLocation());
			}
			break;
		case eUsNormal:
			{
				glVertexAttribPointer(sUniform.getLocation(), 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
				glEnableVertexAttribArray(sUniform.getLocation());
			}
			break;
		case eUsTangent:
			{
				glVertexAttribPointer(sUniform.getLocation(), 3, GL_FLOAT, GL_FALSE, pMesh->sTangents.nStride, pMesh->sTangents.pData);
				glEnableVertexAttribArray(sUniform.getLocation());
			}
			break;
		case eUsBinormal:
			{
				glVertexAttribPointer(sUniform.getLocation(), 3, GL_FLOAT, GL_FALSE, pMesh->sBinormals.nStride, pMesh->sBinormals.pData);
				glEnableVertexAttribArray(sUniform.getLocation());
			}
			break;
		case eUsUV:
			{
				glVertexAttribPointer(sUniform.getLocation(), 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
				glEnableVertexAttribArray(sUniform.getLocation());
			}
			break;
		case eUsBoneIndex:
			{
				glVertexAttribPointer(sUniform.getLocation(), pMesh->sBoneIdx.n, GL_UNSIGNED_BYTE, GL_FALSE, pMesh->sBoneIdx.nStride, pMesh->sBoneIdx.pData);
				glEnableVertexAttribArray(sUniform.getLocation());
			}
			break;
		case eUsBoneWeight:
			{
				glVertexAttribPointer(sUniform.getLocation(), pMesh->sBoneWeight.n, GL_FLOAT, GL_FALSE, pMesh->sBoneWeight.nStride, pMesh->sBoneWeight.pData);
				glEnableVertexAttribArray(sUniform.getLocation());
			}
			break;
		case eUsWORLD:
			{
				glUniformMatrix4fv(sUniform.getLocation(), 1, GL_FALSE, m_mWorld.f);
			}
			break;
		case eUsWORLDI:
			{
				PVRTMat4 mWorldI;
				mWorldI =  m_mWorld.inverse();
				glUniformMatrix4fv(sUniform.getLocation(), 1, GL_FALSE, mWorldI.f);
			}
			break;
		case eUsWORLDIT:
			{
				PVRTMat3 mWorldIT;
				mWorldIT = m_mWorld.inverse().transpose();
				glUniformMatrix3fv(sUniform.getLocation(), 1, GL_FALSE, mWorldIT.f);
			}
			break;
		case eUsWORLDVIEW:
			{
				glUniformMatrix4fv(sUniform.getLocation(), 1, GL_FALSE, m_mWorldView.f);
			}
			break;
		case eUsWORLDVIEWI:
			{
				PVRTMat4 mWorldViewI;
				mWorldViewI = m_mWorldView.inverse();
				glUniformMatrix4fv(sUniform.getLocation(), 1, GL_FALSE, mWorldViewI.f);
			}
			break;
		case eUsWORLDVIEWIT:
			{
				PVRTMat3 mWorldViewIT;
				mWorldViewIT = m_mWorldView.inverse().transpose();
				glUniformMatrix3fv(sUniform.getLocation(), 1, GL_FALSE, mWorldViewIT.f);
			}
			break;
		case eUsWORLDVIEWPROJECTION:
			{
				glUniformMatrix4fv(sUniform.getLocation(), 1, GL_FALSE, m_mWorldViewProjection.f);
			}
			break;
		case eUsWORLDVIEWPROJECTIONI:
			{
				PVRTMat4 mWorldViewProjectionI = (m_mProjection * m_mWorldView ).inverse();
				glUniformMatrix4fv(sUniform.getLocation(), 1, GL_FALSE, mWorldViewProjectionI.f);
			}
			break;
		case eUsWORLDVIEWPROJECTIONIT:
			{
				PVRTMat3 mWorldViewProjectionIT = (m_mProjection * m_mWorldView).inverse().transpose();
				glUniformMatrix3fv(sUniform.getLocation(), 1, GL_FALSE, mWorldViewProjectionIT.f);
			}
			break;
		case eUsLIGHTPOSMODEL:
			{
				// Passes the light position in eye space to the shader
				Light* pLight = m_pLightManager->get(sUniform.getIdx());
				switch(pLight->getType())
				{
				case eLightTypePoint:
					{
						PVRTVec4 vLightPosModel = m_mWorld.inverse() * ((LightPoint*)pLight)->getPositionPVRTVec4() ;
						glUniform3f(sUniform.getLocation(),
							vLightPosModel.x,
							vLightPosModel.y,
							vLightPosModel.z);
					}
					break;
				case eLightTypePODPoint:
					{
						PVRTVec4 vLightPosModel = m_mWorld.inverse() * ((LightPODPoint*)pLight)->getPositionPVRTVec4() ;
						glUniform3f(sUniform.getLocation(),
							vLightPosModel.x,
							vLightPosModel.y,
							vLightPosModel.z);
					}
					break;
				default:
					{	// hack for directional lights
						// take the light direction and multiply it by a really big negative number
						// if you hit this code then the types of your lights do not match the types expected by your shaders
						PVRTVec4 vLightPosModel = (((LightDirectional*)pLight)->getDirectionPVRTVec4()*c_fFarDistance) ;
						vLightPosModel.w = f2vt(1.0f);
						vLightPosModel = m_mWorld * vLightPosModel;
						glUniform3f(sUniform.getLocation(),
							vLightPosModel.x,
							vLightPosModel.y,
							vLightPosModel.z);
					}
				}
			}
			break;
		case eUsOBJECT:
			{
				// Scale
				PVRTMat4 mObject = m_psScene->GetScalingMatrix(*pNode);
				// Rotation
				mObject = m_psScene->GetRotationMatrix(*pNode) * mObject;
				// Translation
				mObject = m_psScene->GetTranslationMatrix(*pNode) * mObject;

				glUniformMatrix4fv(sUniform.getLocation(), 1, GL_FALSE, mObject.f);
			}
			break;
		case eUsOBJECTI:
			{
				if(!getFlag(eUsOBJECT))
				{
					// Scale
					m_mObject = m_psScene->GetScalingMatrix(*pNode);
					// Rotation
					m_mObject = m_psScene->GetRotationMatrix(*pNode) * m_mObject;
					// Translation
					m_mObject = (m_psScene->GetTranslationMatrix(*pNode) * m_mObject);
					setFlag(eUsOBJECT);
				}
				m_mObjectI = m_mObject.inverse();

				glUniformMatrix4fv(sUniform.getLocation(), 1, GL_FALSE, m_mObjectI.f);
			}
			break;
		case eUsOBJECTIT:
			{
				if(!getFlag(eUsOBJECTI))
				{
					if(!getFlag(eUsOBJECT))
					{
						// Scale
						m_mObject = m_psScene->GetScalingMatrix(*pNode);
						// Rotation
						m_mObject = m_psScene->GetRotationMatrix(*pNode) * m_mObject;
						// Translation
						m_mObject = (m_psScene->GetTranslationMatrix(*pNode) * m_mObject);
						setFlag(eUsOBJECT);
					}
					m_mObjectI = m_mObject.inverse();
					setFlag(eUsOBJECTI);
				}

				m_mObjectIT = PVRTMat3(m_mObjectI).transpose();

				glUniformMatrix3fv(sUniform.getLocation(), 1, GL_FALSE, m_mObjectIT.f);
			}
			break;
		case eUsLIGHTDIRMODEL:
			{
				Light* pLight = m_pLightManager->get(sUniform.getIdx());
				switch(pLight->getType())
				{
				case eLightTypeDirectional:
					{
						// Passes the light direction in model space to the shader
						PVRTVec4 vLightDirectionModel,
							vLightDirection =((LightDirectional*)pLight)->getDirectionPVRTVec4();
						vLightDirectionModel = m_mWorld.inverse() * vLightDirection ;
						glUniform3f(sUniform.getLocation(), vLightDirectionModel.x, vLightDirectionModel.y, vLightDirectionModel.z);
					}
					break;
				case eLightTypePODDirectional:
					{
						// Passes the light direction in model space to the shader
						PVRTVec4 vLightDirectionModel,
							vLightDirection =((LightPODDirectional*)pLight)->getDirectionPVRTVec4();
						vLightDirectionModel = m_mWorld.inverse() * vLightDirection ;
						glUniform3f(sUniform.getLocation(), vLightDirectionModel.x, vLightDirectionModel.y, vLightDirectionModel.z);
					}
					break;
				default:
					{	// could mimic point lights
						// calculate vector between light position and mesh

						// implemented by getting hold of the nice centre point I calculated for all these meshes and using this point
					}
				}
			}
			break;
		case eUsEYEPOSMODEL:
			{	
				m_vEyePositionModel = m_mWorld.inverse() * PVRTVec4(m_vEyePositionWorld,VERTTYPE(1.0f));
				glUniform3f(sUniform.getLocation(), m_vEyePositionModel.x, m_vEyePositionModel.y, m_vEyePositionModel.z);
			}
			break;
		default:
			{	// something went wrong
				ConsoleLog::inst().log("Error: non-mesh uniform being interpreted as mesh uniform\n");
				return;
			}
		}
	}
Code Example #9
PVRTMat3 PVRTTextureAtlas::CalculateTransformation(const PVRTVec2 offset, const PVRTVec2 span, const PVRTVec2 border) const
{	
	return PVRTMat3(span.x - border.x, 0.0f, 0.0f,
		            0.0f, span.y - border.y, 0.0f,
		            offset.x + border.x, offset.y + border.y, 0.0f);
}
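
`CalculateTransformation` packs a scale by the border-adjusted span and a translation to the sub-texture's offset into one 3x3 matrix. Written out directly, the mapping it is meant to encode takes a local UV in [0,1]^2 into atlas space as follows (`MapToAtlas` is an illustrative helper, not part of the SDK):

// Sketch: the UV mapping encoded by the atlas transformation above.
PVRTVec2 MapToAtlas(const PVRTVec2& uv, const PVRTVec2& offset, const PVRTVec2& span, const PVRTVec2& border)
{
	return PVRTVec2(uv.x * (span.x - border.x) + offset.x + border.x,
	                uv.y * (span.y - border.y) + offset.y + border.y);
}
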
Code Example #10
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2LevelOfDetail::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind textures
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiReflectTex);
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, m_uiNormalTex);

	// Rotate and Translate the model matrix
	PVRTMat4 mModel, mRotY, mTrans;
	float fDistance = 1400.0f * cos(m_fPositionZ) - 1350.0f;
	
	mTrans = PVRTMat4::Translation(0.0, 0.0, fDistance);
	mRotY = PVRTMat4::RotationY(m_fAngleY);
	mModel = mTrans * mRotY;

	m_fAngleY += PVRT_PI / 210;
	m_fPositionZ += 2 * PVRT_PI * 0.0008f;

	// Set model view projection matrix
	PVRTMat4 mModelView, mMVP;
	mModelView = m_mView * mModel;
	mMVP = m_mProjection * mModelView;
	glUniformMatrix4fv(m_ShaderProgram.auiLoc[eMVPMatrix], 1, GL_FALSE, mMVP.ptr());

	// Set model matrix
	PVRTMat3 Model3x3 = PVRTMat3(mModel);

	glUniformMatrix3fv(m_ShaderProgram.auiLoc[eModelWorld], 1, GL_FALSE, Model3x3.ptr());

	// Set eye position in model space
	PVRTVec4 vEyePosModel;
	vEyePosModel = mModelView.inverse() * PVRTVec4(0, 0, 0, 1);

	glUniform3fv(m_ShaderProgram.auiLoc[eEyePosModel], 1, &vEyePosModel.x);

	// Calculate the square of the pixel area that the mesh takes up on screen
	// This is done by projecting the vertices of the bounding box to screen space
	// then taking the axis aligned 2D bounding box of the projected vertices.
	// This is a very conservative estimate
	float fMinX, fMaxX, fMinY, fMaxY, fX, fY;
	ProjectVertex(m_avBoundingBox[0], mMVP, fX, fY);
	fMinX = fMaxX = fX;
	fMinY = fMaxY = fY;

	for (int i = 1; i < 8; ++i)
	{
		ProjectVertex(m_avBoundingBox[i], mMVP, fX, fY);
		fMinX = PVRT_MIN(fMinX, fX);
		fMinY = PVRT_MIN(fMinY, fY);
		fMaxX = PVRT_MAX(fMaxX, fX);
		fMaxY = PVRT_MAX(fMaxY, fY);
	}

	// Choose high detail if the mesh bounding box covers more than 2% of the screen
	m_bHighDetail = ((fMaxX - fMinX) * (fMaxY - fMinY) > 0.02);
	glUniform1i(m_ShaderProgram.auiLoc[eHighDetail], m_bHighDetail);

	/*
		Now that the uniforms are set, call another function to actually draw the mesh.
	*/
	DrawMesh(m_bHighDetail ? 0 : 1);

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("Level of detail", (m_bHighDetail) ? "Detail: high" : "Detail: low", ePVRTPrint3DLogoIMG);
	m_Print3D.Flush();
	return true;
}
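
`ProjectVertex` itself is not shown in this snippet; for the coverage estimate above it only needs to return a bounding-box corner's position in normalized screen units, so that `(fMaxX - fMinX) * (fMaxY - fMinY)` measures the fraction of the screen covered. A plausible sketch under that assumption (not the demo's actual implementation):

// Sketch: project a model-space point with the MVP and return [0,1] screen coordinates.
void ProjectVertexSketch(const PVRTVec3& v, const PVRTMat4& mMVP, float& fX, float& fY)
{
	PVRTVec4 vClip = mMVP * PVRTVec4(v, 1.0f);
	fX = (vClip.x / vClip.w) * 0.5f + 0.5f;	// perspective divide to NDC, then remap [-1,1] -> [0,1]
	fY = (vClip.y / vClip.w) * 0.5f + 0.5f;
}
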
Code Example #11
/*******************************************************************************
 * Function Name : DrawModel
 * Description   : Draws the model
 *******************************************************************************/
void OGLES2Shaders::DrawModel()
{
	// Use the loaded effect
	m_ppEffect[m_nCurrentShader]->Activate();

	/*
		Set attributes and uniforms
	*/
	const CPVRTArray<SPVRTPFXUniform>& Uniforms = m_ppEffect[m_nCurrentShader]->GetUniformArray();

	for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
	{

		switch(Uniforms[j].nSemantic)
		{
		case ePVRTPFX_UsPOSITION:
			{
				glBindBuffer(GL_ARRAY_BUFFER, m_Surface->iVertexVBO);
				glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
				glEnableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsNORMAL:
			{
				glBindBuffer(GL_ARRAY_BUFFER, m_Surface->iNormalVBO);
				glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
				glEnableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsUV:
			{
				glBindBuffer(GL_ARRAY_BUFFER, m_Surface->iUvVBO);
				glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, 0, (const void*) NULL);
				glEnableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsWORLDVIEWPROJECTION:
			{
				PVRTMat4 mMVP;

				/* Passes the model-view-projection matrix (MVP) to the shader to transform the vertices */
				mMVP = m_mProjection * m_mModelView;
				glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mMVP.f);
			}
			break;
		case ePVRTPFX_UsWORLDVIEW:
			{
				glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, m_mModelView.f);
			}
			break;
		case ePVRTPFX_UsWORLDVIEWIT:
			{
				PVRTMat4 mModelViewI, mModelViewIT;

				/* Passes the inverse transpose of the model-view matrix to the shader to transform the normals */
				mModelViewI = m_mModelView.inverse();
				mModelViewIT= mModelViewI.transpose();
				PVRTMat3 ModelViewIT = PVRTMat3(mModelViewIT);

				glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, ModelViewIT.f);
			}
			break;
		case ePVRTPFX_UsVIEWIT:
			{
				PVRTMat4 mViewI, mViewIT;

				/* Passes the inverse transpose of the model-view matrix to the shader to transform the normals */
				mViewI = m_mView.inverse();
				mViewIT= mViewI.transpose();

				PVRTMat3 ViewIT = PVRTMat3(mViewIT);

				glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, ViewIT.f);
			}
			break;
		case ePVRTPFX_UsTEXTURE:
			{
				// Set the sampler variable to the texture unit
				glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
			}
			break;
		case ePVRTPFX_UsANIMATION:
			{
				// Float in the range 0..1: contains this object's distance through its animation.
				float fAnimation = 0.5f * m_fViewAngle / PVRT_PI;
				glUniform1f(Uniforms[j].nLocation, fAnimation);
			}
			break;
		}
	}

	glBindBuffer(GL_ARRAY_BUFFER, 0);	// Unbind the last buffer used.

	glDrawElements(GL_TRIANGLES, m_Surface->GetNumFaces()*3, GL_UNSIGNED_SHORT, m_Surface->pIndex);

	/*
		Disable attributes
	*/
	for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
	{
		switch(Uniforms[j].nSemantic)
		{
		case ePVRTPFX_UsPOSITION:
			{
				glDisableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsNORMAL:
			{
				glDisableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		case ePVRTPFX_UsUV:
			{
				glDisableVertexAttribArray(Uniforms[j].nLocation);
			}
			break;
		}
	}

	return;
}
Code Example #12
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2LightMap::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Bind textures
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_uiBaseTex);
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, m_uiReflectTex);
	glActiveTexture(GL_TEXTURE2);
	glBindTexture(GL_TEXTURE_2D, m_uiShadowTex);

	// draw two models, mask and plane
	for (int i = 0; i < eNumModels; ++i)
	{
		// rotate and translate the model matrix
		PVRTMat4 mModel;

		if (i == eMask)
		{
			PVRTMat4 mRotX, mRotY;
			mRotX = PVRTMat4::RotationX(m_fAngleX);
			m_fAngleX += PVRT_PI / 300;
			mRotY = PVRTMat4::RotationY(m_fAngleY);
			m_fAngleY += PVRT_PI / 250;

			mModel = mRotY * mRotX;
		}
		else
		{
			mModel = PVRTMat4::Translation(0.0, 0.0, -25);
		}

		// Set model view projection matrix
		PVRTMat4 mModelView, mMVP;
		mModelView = m_mView * mModel;
		mMVP = m_mProjection * mModelView;
		glUniformMatrix4fv(m_ShaderProgram.auiLoc[eMVPMatrix], 1, GL_FALSE, mMVP.ptr());

		// Set shadow projection matrix
		PVRTMat4 mShadowProj;
		mShadowProj = m_mShadowViewProj * mModel;
		glUniformMatrix4fv(m_ShaderProgram.auiLoc[eShadowProj], 1, GL_FALSE, mShadowProj.ptr());

		// Set model world matrix
		PVRTMat3 fModelWorld = PVRTMat3(mModel);

		glUniformMatrix3fv(m_ShaderProgram.auiLoc[eModelWorld], 1, GL_FALSE, fModelWorld.ptr());

		// Set light position in model space
		PVRTVec4 vLightDirModel;
		vLightDirModel =  mModel.inverse() *  PVRTVec4( 1, 1, 1, 0 );

		glUniform3fv(m_ShaderProgram.auiLoc[eLightDirModel], 1, &vLightDirModel.x);

		// Set eye position in model space
		PVRTVec4 vEyePosModel;
		vEyePosModel = mModelView.inverse() * PVRTVec4(0, 0, 0, 1);

		glUniform3fv(m_ShaderProgram.auiLoc[eEyePosModel], 1, &vEyePosModel.x);

		m_Models[i].DrawMesh(0);
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("LightMap", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
Code Example #13
/*!****************************************************************************
 @Function		DrawMesh
 @Input			i32NodeIndex		Node index of the mesh to draw
 @Description	Draws a SPODMesh after the model view matrix has been set and
				the material prepared.
******************************************************************************/
void OGLES3Skinning::DrawMesh(int i32NodeIndex)
{
	SPODNode& Node = m_Scene.pNode[i32NodeIndex];
	SPODMesh& Mesh = m_Scene.pMesh[Node.nIdx];

	// bind the VBO for the mesh
	glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[Node.nIdx]);
	// bind the index buffer, won't hurt if the handle is 0
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[Node.nIdx]);

	// Enable the vertex attribute arrays
	glEnableVertexAttribArray(VERTEX_ARRAY);
	glEnableVertexAttribArray(NORMAL_ARRAY);
	glEnableVertexAttribArray(TEXCOORD_ARRAY);

	// Set the vertex attribute offsets
	glVertexAttribPointer(VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sVertex.nStride,  Mesh.sVertex.pData);
	glVertexAttribPointer(NORMAL_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sNormals.nStride, Mesh.sNormals.pData);
	glVertexAttribPointer(TEXCOORD_ARRAY, 2, GL_FLOAT, GL_FALSE, Mesh.psUVW[0].nStride, Mesh.psUVW[0].pData);

	/*
		If the current mesh has bone index and weight data then we need to
		set up some additional variables in the shaders.
	*/
	if(Mesh.sBoneIdx.n && Mesh.sBoneWeight.n)
	{
		glEnableVertexAttribArray(BONEINDEX_ARRAY);
		glEnableVertexAttribArray(BONEWEIGHT_ARRAY);

		glVertexAttribPointer(BONEINDEX_ARRAY, Mesh.sBoneIdx.n, GL_UNSIGNED_BYTE, GL_FALSE, Mesh.sBoneIdx.nStride, Mesh.sBoneIdx.pData);
		glVertexAttribPointer(BONEWEIGHT_ARRAY, Mesh.sBoneWeight.n, GL_UNSIGNED_BYTE, GL_TRUE, Mesh.sBoneWeight.nStride, Mesh.sBoneWeight.pData);

		/*
			There is a limit to the number of bone matrices that you can pass to the shader so we have
			chosen to limit the number of bone matrices that affect a mesh to 8. However, this does
			not mean our character can only have a skeleton consisting of 8 bones. We can get around
			this by using bone batching where the character is split up into sub-meshes that are only
			affected by a subset of the overall skeleton. This is why we have this for loop that
			iterates through the bone batches contained within the SPODMesh.
		*/
		for (int i32Batch = 0; i32Batch < Mesh.sBoneBatches.nBatchCnt; ++i32Batch)
		{
			// Set the number of bones that will influence each vertex in the mesh
			glUniform1i(m_ShaderProgram.auiLoc[eBoneCount], Mesh.sBoneIdx.n);

			// Go through the bones for the current bone batch
			PVRTMat4 amBoneWorld[8];
			PVRTMat3 afBoneWorldIT[8], mBoneIT;

			int i32Count = Mesh.sBoneBatches.pnBatchBoneCnt[i32Batch];

			for(int i = 0; i < i32Count; ++i)
			{
				// Get the Node of the bone
				int i32NodeID = Mesh.sBoneBatches.pnBatches[i32Batch * Mesh.sBoneBatches.nBatchBoneMax + i];

				// Get the World transformation matrix for this bone and combine it with our app defined
				// transformation matrix
				amBoneWorld[i] = m_Transform * m_Scene.GetBoneWorldMatrix(Node, m_Scene.pNode[i32NodeID]);

				// Calculate the inverse transpose of the 3x3 rotation/scale part for correct lighting
				afBoneWorldIT[i] = PVRTMat3(amBoneWorld[i]).inverse().transpose();
			}

			glUniformMatrix4fv(m_ShaderProgram.auiLoc[eBoneMatrices], i32Count, GL_FALSE, amBoneWorld[0].ptr());
			glUniformMatrix3fv(m_ShaderProgram.auiLoc[eBoneMatricesIT], i32Count, GL_FALSE, afBoneWorldIT[0].ptr());

			/*
				As we are using bone batching we don't want to draw all the faces contained within pMesh, we only want
				to draw the ones that are in the current batch. To do this we pass to the drawMesh function the offset
				to the start of the current batch of triangles (Mesh.sBoneBatches.pnBatchOffset[i32Batch]) and the
				total number of triangles to draw (i32Tris)
			*/
			int i32Tris;
			if(i32Batch+1 < Mesh.sBoneBatches.nBatchCnt)
				i32Tris = Mesh.sBoneBatches.pnBatchOffset[i32Batch+1] - Mesh.sBoneBatches.pnBatchOffset[i32Batch];
			else
				i32Tris = Mesh.nNumFaces - Mesh.sBoneBatches.pnBatchOffset[i32Batch];

			// Draw the mesh
			size_t offset = sizeof(GLushort) * 3 * Mesh.sBoneBatches.pnBatchOffset[i32Batch];
			glDrawElements(GL_TRIANGLES, i32Tris * 3, GL_UNSIGNED_SHORT, (void*) offset);
		}

		glDisableVertexAttribArray(BONEINDEX_ARRAY);
		glDisableVertexAttribArray(BONEWEIGHT_ARRAY);
	}
	else
	{
		glUniform1i(m_ShaderProgram.auiLoc[eBoneCount], 0);
		glDrawElements(GL_TRIANGLES, Mesh.nNumFaces*3, GL_UNSIGNED_SHORT, 0);
	}

	// Safely disable the vertex attribute arrays
	glDisableVertexAttribArray(VERTEX_ARRAY);
	glDisableVertexAttribArray(NORMAL_ARRAY);
	glDisableVertexAttribArray(TEXCOORD_ARRAY);

	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
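
The bone matrices and their inverse transposes uploaded above are blended per vertex in the vertex shader: each vertex combines up to `Mesh.sBoneIdx.n` bone transforms using its weights. The shader itself is not part of this snippet; a CPU-side sketch of the same linear-blend skinning, with illustrative parameter names, would be:

// Sketch: linear-blend skinning for one vertex. amBoneWorld is the per-batch array uploaded
// above; aIdx/aWeight correspond to BONEINDEX_ARRAY / BONEWEIGHT_ARRAY (weights sum to 1).
PVRTVec4 SkinPosition(const PVRTVec3& vPos, const PVRTMat4 amBoneWorld[],
                      const unsigned char aIdx[], const float aWeight[], int iBoneCount)
{
	PVRTVec4 vResult(0.0f, 0.0f, 0.0f, 0.0f);
	for (int i = 0; i < iBoneCount; ++i)
		vResult = vResult + (amBoneWorld[aIdx[i]] * PVRTVec4(vPos, 1.0f)) * aWeight[i];
	return vResult;
}
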
Code Example #14
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3MagicLantern::RenderScene()
{
	PVRTMat4 mLightWorld;

	// Clear the color and depth buffer.
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Time-based animation: convert the elapsed milliseconds into frames at a nominal 60 FPS.
	// Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	unsigned long ulTime = PVRShellGetTime();
	unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
	m_ulTimePrev = ulTime;
	m_fFrame += (float)ulDeltaTime * (60.0f/1000.0f);

	// Create an animation for the position and rotation of the light-ball.
	m_LightPosition.x = cos(m_fFrame/140.0f)*60.0f;
	m_LightPosition.z = sin(m_fFrame/70.0f)*60.0f - 40.0f;
	m_LightPosition.y = sin(m_fFrame/100.0f)*20.0f;

	mLightWorld  = PVRTMat4::Translation(m_LightPosition.x, m_LightPosition.y-10, m_LightPosition.z);
	mLightWorld *= PVRTMat4::RotationX(m_fFrame/540.0f);
	mLightWorld *= PVRTMat4::RotationZ(m_fFrame/370.0f);
	
	// To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
	for (unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
	{
		SPODNode& Node = m_Scene.pNode[i]; 

		// Get the current effect from the material ID. The list of IDs was built in LoadPFX()
		GLuint uiFXID = m_puiMaterialEffectID[Node.nIdxMaterial];

		// Use the loaded effect (Vertex and Fragment shader)
		// And also bind all textures in the effect.
		m_pFX[uiFXID]->Activate();
		
		// Set the blend mode
		// Based on the info stored in the material by PVRShaman.
		// We check whether the blend mode is 'opaque' (ONE,ZERO).
		// Otherwise we enable blending and set the corresponding operations.
		if (m_Scene.pMaterial[Node.nIdxMaterial].eBlendSrcRGB == ePODBlendFunc_ONE && m_Scene.pMaterial[Node.nIdxMaterial].eBlendDstRGB == ePODBlendFunc_ZERO)
		{
			glDisable(GL_BLEND);
		}
		else
		{
			glEnable(GL_BLEND);
			glBlendFunc(m_Scene.pMaterial[Node.nIdxMaterial].eBlendSrcRGB, m_Scene.pMaterial[Node.nIdxMaterial].eBlendDstRGB);
		}

		// Now process the PVRShaman semantics and set up the associated uniforms.
		const CPVRTArray<SPVRTPFXUniform>& Uniforms = m_pFX[uiFXID]->GetUniformArray();
		for(unsigned int j = 0; j < Uniforms.GetSize() ; ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWVP;

					// The whole scene (except the ball) is static.
					// The POD file was saved with 'world space' coordinates so there is no need to apply the
					// transformation matrices for the meshes, just the identity.
					// If you are exporting animation you will need to set the corresponding mesh
					// transformation matrix here (see IntroducingPOD training course).
					PVRTMat4 mWorld = PVRTMat4::Identity();

					// Check whether the current effect is the sphere and set up the world matrix with the sphere animation.
					if (m_ppEffectParser->GetEffect(uiFXID).Name == c_SphereEffectName)
					{
						mWorld = mLightWorld;
					}

					// Pass the model-view-projection matrix (MVP) to the shader to transform the vertices.
					PVRTMat4 mModelView;
					mModelView = m_mView * mWorld;
					mWVP = m_mProjection * mModelView;

					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mWVP.f);
				} 
				break;
			case ePVRTPFX_UsWORLDIT:
				{
					// Passes the inverse transpose of the light rotation matrix.
					// This is needed to transform the light direction (from the light to each vertex) 
					// and it will give us a three component texture vector to map the cubemap texture.
					PVRTMat3 mLightModelIT = PVRTMat3(mLightWorld).inverse().transpose();
					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mLightModelIT.f);
				}
				break;
			case ePVRTPFX_UsLIGHTPOSWORLD:
				{
					glUniform3f(Uniforms[j].nLocation, m_LightPosition.x, m_LightPosition.y, m_LightPosition.z);
				}
				break;
			case ePVRTPFX_UsLIGHTCOLOR:
				{
					// Some color variation here to make it more interesting.
					float afLightColor[] = { 1.0f, sin(m_fFrame/300.0f)*0.3f+0.7f, cos(m_fFrame/400.0f)*0.3f+0.7f};
					glUniform3fv(Uniforms[j].nLocation, 1, afLightColor ); 
				}
				break;
			}
		}

		// Now that all transformation matrices and the materials are ready,
		// call a function to actually draw the mesh.
		// We need to pass the current effect to process 'attributes' properly.
		DrawMesh(i, m_pFX[uiFXID]);
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools.
	m_Print3D.DisplayDefaultTitle("Magic Lantern", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
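
The animation above advances by `(float)ulDeltaTime * (60.0f/1000.0f)`, i.e. the elapsed milliseconds are converted into frames at a nominal 60 FPS, so the light-ball moves at the same real-time speed whatever the actual frame rate. The same idea in isolation (names are illustrative):

// Sketch: frame-rate independent animation counter.
// 16 ms elapsed -> ~0.96 nominal frames; 33 ms elapsed -> ~2 nominal frames.
float AdvanceFrame(float fFrame, unsigned long ulDeltaMs)
{
	return fFrame + (float)ulDeltaMs * (60.0f / 1000.0f);
}
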
Code Example #15
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				Will also manage relevant OS events. The user has access to
				these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLESIntroducingPFX::RenderScene()
{
	// Clears the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use the loaded effect
	m_pEffect->Activate();

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	int iTime = PVRShellGetTime();
	int iDeltaTime = iTime - m_iTimePrev;
	m_iTimePrev	= iTime;
	m_fFrame	+= (float)iDeltaTime * DEMO_FRAME_RATE;
	if (m_fFrame > m_Scene.nNumFrame-1)
		m_fFrame = 0;

	// Sets the scene animation to this frame
	m_Scene.SetFrame(m_fFrame);

	{
		PVRTVec3	vFrom, vTo, vUp;
		VERTTYPE	fFOV;
		vUp.x = 0.0f;
		vUp.y = 1.0f;
		vUp.z = 0.0f;

		// We can get the camera position, target and field of view (fov) with GetCameraPos()
		fFOV = m_Scene.GetCameraPos(vFrom, vTo, 0) * 0.4f;

		/*
			We can build the view matrix from the camera position, target and an up vector.
			For this we use PVRTMat4::LookAtRH().
		*/
		m_mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

		// Calculates the projection matrix
		bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
		m_mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), CAM_NEAR, CAM_FAR, PVRTMat4::OGL, bRotate);
	}

	/*
		A scene is composed of nodes. There are 3 types of nodes:
		- MeshNodes :
			references a mesh in the pMesh[].
			These nodes are at the beginning of the pNode[] array.
			And there are nNumMeshNode number of them.
			This way the .pod format can instantiate the same mesh several times
			with different attributes.
		- lights
		- cameras
		To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
	*/
	for (int i=0; i<(int)m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];

		// Gets pMesh referenced by the pNode
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];

		glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);

		// Gets the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(*pNode);

		PVRTMat4 mWorldView;
		mWorldView = m_mView * mWorld;

		for(unsigned int j = 0; j < m_nUniformCnt; ++j)
		{
			switch(m_psUniforms[j].nSemantic)
			{
			case eUsPOSITION:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsNORMAL:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsUV:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWVP;

					/* Passes the world-view-projection matrix (WVP) to the shader to transform the vertices */
					mWVP = m_mProjection * mWorldView;
					glUniformMatrix4fv(m_psUniforms[j].nLocation, 1, GL_FALSE, mWVP.f);
				}
				break;
			case eUsWORLDVIEWIT:
				{
					PVRTMat4 mWorldViewI, mWorldViewIT;

					/* Passes the inverse transpose of the world-view matrix to the shader to transform the normals */
					mWorldViewI  = mWorldView.inverse();
					mWorldViewIT = mWorldViewI.transpose();

					PVRTMat3 WorldViewIT = PVRTMat3(mWorldViewIT);

					glUniformMatrix3fv(m_psUniforms[j].nLocation, 1, GL_FALSE, WorldViewIT.f);
				}
				break;
			case eUsLIGHTDIREYE:
				{
					// Reads the light direction from the scene.
					PVRTVec4 vLightDirection;
					PVRTVec3 vPos;
					vLightDirection = m_Scene.GetLightDirection(0);

					vLightDirection.x = -vLightDirection.x;
					vLightDirection.y = -vLightDirection.y;
					vLightDirection.z = -vLightDirection.z;

					/*
						Sets the w component to 0, so when passing it to glLight(), it is
						considered as a directional light (as opposed to a spot light).
					*/
					vLightDirection.w = 0;

					// Passes the light direction in eye space to the shader
					PVRTVec4 vLightDirectionEyeSpace;
					vLightDirectionEyeSpace = m_mView * vLightDirection;

					glUniform3f(m_psUniforms[j].nLocation, vLightDirectionEyeSpace.x, vLightDirectionEyeSpace.y, vLightDirectionEyeSpace.z);
				}
				break;
			case eUsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(m_psUniforms[j].nLocation, m_psUniforms[j].nIdx);
				}
				break;
			}
		}

		/*
			Now that the model-view matrix is set and the materials ready,
			call another function to actually draw the mesh.
		*/
		DrawMesh(pMesh);
		glBindBuffer(GL_ARRAY_BUFFER, 0);

		for(unsigned int j = 0; j < m_nUniformCnt; ++j)
		{
			switch(m_psUniforms[j].nSemantic)
			{
			case eUsPOSITION:
				{
					glDisableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsNORMAL:
				{
					glDisableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsUV:
				{
					glDisableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			}
		}
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("IntroducingPFX", "", ePVRTPrint3DLogoIMG);
	m_Print3D.Flush();

	return true;
}
Code Example #16
/*!****************************************************************************
 @Function		DrawSkinnedMesh
 @Input			i32NodeIndex		Node index of the mesh to draw
 @Description	Draws a SPODMesh after the model view matrix has been set and
				the material prepared.
******************************************************************************/
void OGLES2ChameleonMan::DrawSkinnedMesh(int i32NodeIndex)
{
	SPODNode& Node = m_Scene.pNode[i32NodeIndex];
	SPODMesh& Mesh = m_Scene.pMesh[Node.nIdx];

	// bind the VBO for the mesh
	glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[Node.nIdx]);
	// bind the index buffer, won't hurt if the handle is 0
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[Node.nIdx]);

	// Set the vertex attribute offsets
	glVertexAttribPointer(VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sVertex.nStride,  Mesh.sVertex.pData);
	glVertexAttribPointer(NORMAL_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sNormals.nStride, Mesh.sNormals.pData);
	glVertexAttribPointer(TANGENT_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sTangents.nStride, Mesh.sTangents.pData);
	glVertexAttribPointer(BINORMAL_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sBinormals.nStride, Mesh.sBinormals.pData);
	glVertexAttribPointer(TEXCOORD_ARRAY, 2, GL_FLOAT, GL_FALSE, Mesh.psUVW[0].nStride, Mesh.psUVW[0].pData);
	glVertexAttribPointer(BONEINDEX_ARRAY, Mesh.sBoneIdx.n, GL_UNSIGNED_BYTE, GL_FALSE, Mesh.sBoneIdx.nStride, Mesh.sBoneIdx.pData);
	glVertexAttribPointer(BONEWEIGHT_ARRAY, Mesh.sBoneWeight.n, GL_UNSIGNED_BYTE, GL_TRUE, Mesh.sBoneWeight.nStride, Mesh.sBoneWeight.pData);

	for(int i32Batch = 0; i32Batch < Mesh.sBoneBatches.nBatchCnt; ++i32Batch)
	{
		/*
			If the current mesh has bone index and weight data then we need to
			set up some additional variables in the shaders.
		*/

		// Set the number of bones that will influence each vertex in the mesh
		glUniform1i(m_SkinnedShaderProgram.auiLoc[eBoneCount], Mesh.sBoneIdx.n);

		// Go through the bones for the current bone batch
		PVRTMat4 amBoneWorld[8];
		PVRTMat3 afBoneWorldIT[8], mBoneIT;

		int i32Count = Mesh.sBoneBatches.pnBatchBoneCnt[i32Batch];

		for(int i = 0; i < i32Count; ++i)
		{
			// Get the Node of the bone
			int i32NodeID = Mesh.sBoneBatches.pnBatches[i32Batch * Mesh.sBoneBatches.nBatchBoneMax + i];

			// Get the World transformation matrix for this bone and combine it with our app defined
			// transformation matrix
			amBoneWorld[i] = m_Scene.GetBoneWorldMatrix(Node, m_Scene.pNode[i32NodeID]);

			// Calculate the inverse transpose of the 3x3 rotation/scale part for correct lighting
			afBoneWorldIT[i] = PVRTMat3(amBoneWorld[i]).inverse().transpose();
		}

		glUniformMatrix4fv(m_SkinnedShaderProgram.auiLoc[eBoneMatrices], i32Count, GL_FALSE, amBoneWorld[0].ptr());
		glUniformMatrix3fv(m_SkinnedShaderProgram.auiLoc[eBoneMatricesIT], i32Count, GL_FALSE, afBoneWorldIT[0].ptr());

		/*
			As we are using bone batching we don't want to draw all the faces contained within pMesh, we only want
			to draw the ones that are in the current batch. To do this we pass to the drawMesh function the offset
			to the start of the current batch of triangles (Mesh.sBoneBatches.pnBatchOffset[i32Batch]) and the
			total number of triangles to draw (i32Tris)
		*/
		int i32Tris;
		if(i32Batch+1 < Mesh.sBoneBatches.nBatchCnt)
			i32Tris = Mesh.sBoneBatches.pnBatchOffset[i32Batch+1] - Mesh.sBoneBatches.pnBatchOffset[i32Batch];
		else
			i32Tris = Mesh.nNumFaces - Mesh.sBoneBatches.pnBatchOffset[i32Batch];

		// Draw the mesh
		size_t offset = sizeof(GLushort) * 3 * Mesh.sBoneBatches.pnBatchOffset[i32Batch];
		glDrawElements(GL_TRIANGLES, i32Tris * 3, GL_UNSIGNED_SHORT, (void*) offset);
	}
}
Code Example #17
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage relevant OS events. The user has access
				to these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3PhantomMask::RenderScene()
{
    if(PVRShellIsKeyPressed(PVRShellKeyNameACTION1))
        m_bEnableSH = !m_bEnableSH;

    // Clear the colour and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Draw the background
    m_Background.Draw(m_ui32TexBackground);

    // Enable culling
    glEnable(GL_CULL_FACE);

    // Enable depth testing
    glEnable(GL_DEPTH_TEST);

    // Use shader program
    GLuint ProgramID, MVPLoc, ModelLoc;

    if(m_bEnableSH)
    {
        ProgramID = m_SHShaderProgram.uiId;
        MVPLoc	  = m_SHShaderProgram.auiLoc[eSHMVPMatrix];
        ModelLoc  = m_SHShaderProgram.auiLoc[eSHModel];
    }
    else
    {
        ProgramID = m_DiffuseShaderProgram.uiId;
        MVPLoc	  = m_DiffuseShaderProgram.auiLoc[eDifMVPMatrix];
        ModelLoc  = m_DiffuseShaderProgram.auiLoc[eDifModel];
    }

    glUseProgram(ProgramID);

    /*
    	Calculates the frame number to animate in a time-based manner.
    	Uses the shell function PVRShellGetTime() to get the time in milliseconds.
    */
    unsigned long ulTime = PVRShellGetTime();

    if(ulTime > m_ulTimePrev)
    {
        unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
        m_fFrame += (float)ulDeltaTime * g_fDemoFrameRate;

        if(m_fFrame > m_Scene.nNumFrame - 1)
            m_fFrame = 0;

        // Sets the scene animation to this frame
        m_Scene.SetFrame(m_fFrame);
    }

    m_ulTimePrev = ulTime;

    /*
    	Set up the view and projection matrices from the camera
    */
    PVRTMat4 mView, mProjection;
    PVRTVec3	vFrom, vTo(0.0f), vUp(0.0f, 1.0f, 0.0f);
    float fFOV;

    // Setup the camera
    bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);

    // Camera nodes are after the mesh and light nodes in the array
    int i32CamID = m_Scene.pNode[m_Scene.nNumMeshNode + m_Scene.nNumLight + g_ui32Camera].nIdx;

    // Get the camera position, target and field of view (fov)
    if(m_Scene.pCamera[i32CamID].nIdxTarget != -1) // Does the camera have a target?
        fFOV = m_Scene.GetCameraPos( vFrom, vTo, g_ui32Camera); // vTo is taken from the target node
    else
        fFOV = m_Scene.GetCamera( vFrom, vTo, vUp, g_ui32Camera); // vTo is calculated from the rotation

    fFOV *= bRotate ? (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight) : (float)PVRShellGet(prefHeight)/(float)PVRShellGet(prefWidth);

    // We can build the view matrix from the camera position, target and an up vector.
    // For this we use PVRTMat4::LookAtRH()
    mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

    // Calculate the projection matrix
    mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);

    SPODNode& Node = m_Scene.pNode[0];

    // Get the node model matrix
    PVRTMat4 mWorld;
    mWorld = m_Scene.GetWorldMatrix(Node);

    // Extract the upper 3x3 (rotation) part of the model matrix for the shader
    PVRTMat3 mMat3 = PVRTMat3(mWorld);

    if(m_bEnableSH)
        mMat3 *= PVRTMat3::RotationY(-1.047197f);

    glUniformMatrix3fv(ModelLoc, 1, GL_FALSE, mMat3.f);

    // Pass the model-view-projection matrix (MVP) to the shader to transform the vertices
    PVRTMat4 mModelView, mMVP;
    mModelView = mView * mWorld;
    mMVP = mProjection * mModelView;
    glUniformMatrix4fv(MVPLoc, 1, GL_FALSE, mMVP.f);

    glBindTexture(GL_TEXTURE_2D, m_ui32TexMask);
    DrawMesh(Node.nIdx);

    // Print text on screen

    if(m_bEnableSH)
    {
        // Base
        m_Print3D.DisplayDefaultTitle("PhantomMask", "Spherical Harmonics Lighting", ePVRTPrint3DSDKLogo);
    }
    else
    {
        // Base
        m_Print3D.DisplayDefaultTitle("PhantomMask", "Vertex Lighting", ePVRTPrint3DSDKLogo);
    }

    m_Print3D.Flush();

    return true;
}
Code example #18
0
File: PVRTVector.cpp Project: ForryShih/cocos3d
/*!***************************************************************************
 @Function			RotationZ
 @Input				angle the angle of rotation
 @Returns			rotation matrix
 @Description		generates a 3x3 rotation matrix about the Z axis
****************************************************************************/
	PVRTMat3 PVRTMat3::RotationZ(VERTTYPE angle)
	{
		PVRTMat4 out;
		PVRTMatrixRotationZ(out,angle);
		return PVRTMat3(out);
	}
Code example #19
0
File: Mesh.cpp Project: anonymousjustice/pvr-pi
void Mesh::DrawSkinned()
{
    /*
    There is a limit to the number of bone matrices that you can pass to the shader, so we have
    chosen to limit the number of bone matrices that affect a mesh to 12. However, this does
    not mean our character can only have a skeleton consisting of 12 bones. We can get around
    this by using bone batching, where the character is split up into sub-meshes that are only
    affected by a subset of the overall skeleton. This is why we have this for loop that
    iterates through the bone batches contained within the SPODMesh.
    */


    for (int i32Batch = 0; i32Batch < m_psMesh->sBoneBatches.nBatchCnt; ++i32Batch)
    {
        /*
        If the current mesh has bone index and weight data then we need to
        set up some additional variables in the shaders.
        */
        if(m_psMesh->sBoneIdx.pData && m_psMesh->sBoneWeight.pData)
        {
            // Set the number of bones that will influence each vertex in the mesh
            glUniform1i(m_gliSkinningLocations[eBoneCount], m_psMesh->sBoneIdx.n);

            // Go through the bones for the current bone batch
            PVRTMat4 amBoneWorld[12];
            PVRTMat3 amBoneWorldIT[12];
            int i32BoneCnt = m_psMesh->sBoneBatches.pnBatchBoneCnt[i32Batch];
            _ASSERT(i32BoneCnt<=12);
            for(int i = 0; i < i32BoneCnt; ++i)
            {
                // Get the Node of the bone
                int i32NodeID = m_psMesh->sBoneBatches.pnBatches[i32Batch * m_psMesh->sBoneBatches.nBatchBoneMax + i];

                // Get the World transformation matrix for this bone
                amBoneWorld[i] = m_psScene->GetBoneWorldMatrix(*m_psNode, m_psScene->pNode[i32NodeID]);

                // Calculate the inverse transpose of the 3x3 rotation/scale part for correct lighting
                amBoneWorldIT[i] = PVRTMat3(amBoneWorld[i].inverse().transpose());
            }

            glUniformMatrix4fv(m_gliSkinningLocations[eBoneMatrices], i32BoneCnt, GL_FALSE, amBoneWorld[0].ptr());
            glUniformMatrix3fv(m_gliSkinningLocations[eBoneMatricesIT], i32BoneCnt, GL_FALSE, amBoneWorldIT[0].f);
        }
        else
        {
            glUniform1i(m_gliSkinningLocations[eBoneCount], 0);
        }
        /*
        As we are using bone batching we don't want to draw all the faces contained within pMesh, we only want
        to draw the ones that are in the current batch. To do this we pass to the drawMesh function the offset
        to the start of the current batch of triangles (Mesh.sBoneBatches.pnBatchOffset[i32Batch]) and the
        total number of triangles to draw (i32Tris)
        */
        int i32Tris;
        if(i32Batch+1 < m_psMesh->sBoneBatches.nBatchCnt)
            i32Tris = m_psMesh->sBoneBatches.pnBatchOffset[i32Batch+1] - m_psMesh->sBoneBatches.pnBatchOffset[i32Batch];
        else
            i32Tris = m_psMesh->nNumFaces - m_psMesh->sBoneBatches.pnBatchOffset[i32Batch];

        glDrawElements(GL_TRIANGLES, i32Tris*3,
                       GL_UNSIGNED_SHORT,
                       &((unsigned short*)m_psMesh->sFaces.pData)[3*m_psMesh->sBoneBatches.pnBatchOffset[i32Batch]]);
    }
}
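
For context, the bone matrices and their inverse-transposes uploaded above are blended per vertex in the skinning vertex shader, weighted by the bone-weight attribute. A rough CPU-side sketch of that linear blend skinning, written with the PVRTools types (this assumes PVRTVector.h from the SDK is available and that the weights are already normalised; it is an illustration, not code from the demo):

#include "PVRTVector.h"	// PVRTVec3/PVRTVec4, PVRTMat3/PVRTMat4 from the PowerVR SDK tools

// Sketch only: blends one vertex by up to numBones bone influences, mirroring what
// the skinning vertex shader does with the uploaded BoneMatrix/BoneMatrixIT uniforms.
void SkinVertex(const PVRTVec3& vPos, const PVRTVec3& vNormal,
                const int* aiBoneIndex, const float* afBoneWeight, int numBones,
                const PVRTMat4* amBoneWorld, const PVRTMat3* amBoneWorldIT,
                PVRTVec3& vPosOut, PVRTVec3& vNormalOut)
{
	vPosOut    = PVRTVec3(0.0f, 0.0f, 0.0f);
	vNormalOut = PVRTVec3(0.0f, 0.0f, 0.0f);

	for(int i = 0; i < numBones; ++i)
	{
		// Transform by this bone's matrices and accumulate, weighted by the bone weight
		PVRTVec4 v = amBoneWorld[aiBoneIndex[i]] * PVRTVec4(vPos, 1.0f);
		PVRTVec3 n = amBoneWorldIT[aiBoneIndex[i]] * vNormal;

		vPosOut.x    += afBoneWeight[i] * v.x;
		vPosOut.y    += afBoneWeight[i] * v.y;
		vPosOut.z    += afBoneWeight[i] * v.z;

		vNormalOut.x += afBoneWeight[i] * n.x;
		vNormalOut.y += afBoneWeight[i] * n.y;
		vNormalOut.z += afBoneWeight[i] * n.z;
	}

	vNormalOut.normalize();
}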
Code example #20
0
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage relevant OS events. The user has access
				to these events through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3Skybox2::RenderScene()
{
	unsigned int i, j;

	// Clears the colour and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/

	unsigned long iTime = PVRShellGetTime();

	if(!bPause)
	{
		// Calculate the model view matrix turning around the balloon
		ComputeViewMatrix();

		if(iTime > m_iTimePrev)
		{
			float fDelta = (float) (iTime - m_iTimePrev) * g_fFrameRate;
			m_fFrame   += fDelta;
			fDemoFrame += fDelta;
			fBurnAnim  += fDelta * 0.02f;

			if(fBurnAnim >= 1.0f)
				fBurnAnim = 1.0f;
		}
	}

	m_iTimePrev	= iTime;

	/* Keyboard input processing */

	if(PVRShellIsKeyPressed(PVRShellKeyNameACTION1))
		bPause=!bPause;

	if(PVRShellIsKeyPressed(PVRShellKeyNameACTION2))
		fBurnAnim = 0.0f;

	/* Automatic shader change over time */
	if(!bPause && (fDemoFrame > 500 || (m_i32Effect == 2 && fDemoFrame > 80)))
	{
		if(++m_i32Effect >= (int) g_ui32NoOfEffects)
		{
			m_i32Effect = 1;
			m_fFrame = 0.0f;
		}

		fDemoFrame = 0.0f;
		fBurnAnim  = 0.0f;
	}

	/* Change Shader Effect */

	if(PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
	{
		if(++m_i32Effect >= (int) g_ui32NoOfEffects)
			m_i32Effect = 1;

		fDemoFrame = 0.0f;
		fBurnAnim  = 0.0f;
		m_fFrame = 0.0f;
	}
	if(PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
	{
		if(--m_i32Effect < 1)
			m_i32Effect = g_ui32NoOfEffects - 1;

		fDemoFrame = 0.0f;
		fBurnAnim  = 0.0f;
		m_fFrame = 0.0f;
	}

	/* Change Skybox Texture */
	if(PVRShellIsKeyPressed(PVRShellKeyNameUP))
	{
		for(i = 0; i < g_ui32NoOfEffects; ++i)
			ChangeSkyboxTo(m_ppEffects[i], m_ui32TextureIDs[4]);

		fBurnAnim = 0.0f;
	}

	if(PVRShellIsKeyPressed(PVRShellKeyNameDOWN))
	{
		for(i = 0; i < g_ui32NoOfEffects; ++i)
			ChangeSkyboxTo(m_ppEffects[i], m_ui32TextureIDs[3]);

		fBurnAnim = 0.0f;
	}

	/* Setup Shader and Shader Constants */
	int location;

	glDisable(GL_CULL_FACE);

	DrawSkybox();

	glEnable(GL_CULL_FACE);

	m_ppEffects[m_i32Effect]->Activate();

	for(i = 0; i < m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];

		// Gets pMesh referenced by the pNode
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];

		// Gets the node model matrix
		PVRTMat4 mWorld, mWORLDVIEW;
		mWorld = m_Scene.GetWorldMatrix(*pNode);

		mWORLDVIEW = m_mView * mWorld;

		glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);

		const CPVRTArray<SPVRTPFXUniform>& Uniforms = m_ppEffects[m_i32Effect]->GetUniformArray();
		for(j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
				case ePVRTPFX_UsPOSITION:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
				case ePVRTPFX_UsNORMAL:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
				case ePVRTPFX_UsUV:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
				case ePVRTPFX_UsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mMVP;

					/* Passes the model-view-projection matrix (MVP) to the shader to transform the vertices */
					mMVP = m_mProjection * mWORLDVIEW;
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mMVP.f);
				}
				break;
				case ePVRTPFX_UsWORLDVIEW:
				{
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mWORLDVIEW.f);
				}
				break;
				case ePVRTPFX_UsWORLDVIEWIT:
				{
					PVRTMat4 mWORLDVIEWI, mWORLDVIEWIT;

					mWORLDVIEWI = mWORLDVIEW.inverse();
					mWORLDVIEWIT= mWORLDVIEWI.transpose();

					PVRTMat3 WORLDVIEWIT = PVRTMat3(mWORLDVIEWIT);

					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, WORLDVIEWIT.f);
				}
				break;
				case ePVRTPFX_UsVIEWIT:
				{
					PVRTMat4 mViewI, mViewIT;

					mViewI  = m_mView.inverse();
					mViewIT = mViewI.transpose();

					PVRTMat3 ViewIT = PVRTMat3(mViewIT);

					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, ViewIT.f);
				}
				break;
				case ePVRTPFX_UsLIGHTDIREYE:
				{
					PVRTVec4 vLightDirectionEyeSpace;

					// Passes the light direction in eye space to the shader
					vLightDirectionEyeSpace = m_mView * PVRTVec4(1.0,1.0,-1.0,0.0);
					glUniform3f(Uniforms[j].nLocation, vLightDirectionEyeSpace.x, vLightDirectionEyeSpace.y, vLightDirectionEyeSpace.z);
				}
				break;
				case ePVRTPFX_UsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
				}
				break;
			}
		}

		location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "myEyePos");

		if(location != -1)
			glUniform3f(location, vCameraPosition.x, vCameraPosition.y, vCameraPosition.z);

		//set animation
		location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "fAnim");

		if(location != -1)
			glUniform1f(location, fBurnAnim);

		location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "myFrame");

		if(location != -1)
			glUniform1f(location, m_fFrame);

		if(g_bBlendShader[m_i32Effect])
		{
			glEnable(GL_BLEND);

			// Correct render order for alpha blending through culling
			// Draw Back faces
			glCullFace(GL_FRONT);

			location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "bBackFace");

			glUniform1i(location, 1);

			DrawMesh(pMesh);

			glUniform1i(location, 0);

			glCullFace(GL_BACK);
		}
		else
		{
			location = glGetUniformLocation(m_ppEffects[m_i32Effect]->GetProgramHandle(), "bBackFace");

			if(location != -1)
				glUniform1i(location, 0);

			glDisable(GL_BLEND);
		}

		/* Everything should now be set up, therefore draw the mesh */
		DrawMesh(pMesh);

		glBindBuffer(GL_ARRAY_BUFFER, 0);

		for(j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
				{
					glDisableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsNORMAL:
				{
					glDisableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsUV:
				{
					glDisableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			}
		}
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	if(!bPause)
		m_Print3D.DisplayDefaultTitle("Skybox2", "", ePVRTPrint3DSDKLogo);
	else
		m_Print3D.DisplayDefaultTitle("Skybox2", "Paused", ePVRTPrint3DSDKLogo);

	m_Print3D.Flush();

	return true;
}
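
A brief note on the WORLDVIEWIT and VIEWIT semantics handled above: normals are transformed with the inverse transpose of the matrix's upper 3x3 so that non-uniform scaling does not skew them; for a pure rotation (plus uniform scale) this reduces to the plain 3x3. A small sketch of deriving and applying that normal matrix with the PVRTools types (same PVRTVector.h assumption as in the earlier sketch):

#include "PVRTVector.h"	// PVRTMat3/PVRTMat4, PVRTVec3 from the PowerVR SDK tools

// Sketch only: builds the 3x3 normal matrix from a world-view matrix and uses it
// to bring a model-space normal into eye space.
PVRTVec3 TransformNormal(const PVRTMat4& mWorldView, const PVRTVec3& vNormal)
{
	// Inverse transpose of the rotation/scale part of the world-view matrix
	PVRTMat3 mNormal = PVRTMat3(mWorldView).inverse().transpose();

	return (mNormal * vNormal).normalized();
}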
Code example #21
0
File: OGLESFur.cpp Project: deepbansal15/Native_SDK
/*!****************************************************************************
 @Function		UpdateFurShells
 @Description	Update the fur shells. This is only called when the number of
				shells change.
******************************************************************************/
void OGLESFur::UpdateFurShells()
{
	PVRTVec3	*pvSrcN, *pvSrcV;
	PVRTVec3	vTransNorm;
	PVRTVec4	vTransPos;
	SVertex		*pvData;
	int				i;
	unsigned int	j;
	float		fDepth, *pUV;

	int i32MeshIndex = m_Scene.pNode[eDuckBody].nIdx;
	SPODMesh* pMesh = &m_Scene.pMesh[i32MeshIndex];

	PVRTMat4 mModel;

	m_Scene.GetWorldMatrix(mModel, m_Scene.pNode[eDuckBody]);

	pvData = new SVertex[pMesh->nNumVertex];

	if(!pvData)
		return;

	for(i = 0; i < m_i32FurShellNo; ++i)
	{
		fDepth = (c_fFurDepth * (float)(i+1) / (float)m_i32FurShellNo);

		for(j = 0; j < pMesh->nNumVertex; ++j)
		{
			pvSrcN	= (PVRTVec3*) (pMesh->pInterleaved + (size_t) pMesh->sNormals.pData + (j * pMesh->sNormals.nStride));
			pvSrcV	= (PVRTVec3*) (pMesh->pInterleaved + (size_t) pMesh->sVertex.pData  + (j * pMesh->sVertex.nStride));
			pUV		= (float*) (pMesh->pInterleaved + (size_t) pMesh->psUVW[0].pData + (j * pMesh->psUVW[0].nStride));

			// Transform the vertex position so it is in world space
			PVRTVec4 vPos4 = PVRTVec4(*pvSrcV, 1.0f);
			PVRTTransform(&vTransPos, &vPos4, &mModel);

			// Transform the vertex normal so it is in world space
			vTransNorm.x = mModel.f[0] * pvSrcN->x + mModel.f[4] * pvSrcN->y + mModel.f[8] * pvSrcN->z;
			vTransNorm.y = mModel.f[1] * pvSrcN->x + mModel.f[5] * pvSrcN->y + mModel.f[9] * pvSrcN->z;
			vTransNorm.z = mModel.f[2] * pvSrcN->x + mModel.f[6] * pvSrcN->y + mModel.f[10]* pvSrcN->z;
			vTransNorm.normalize();

			pvData[j].x = vTransPos.x + (vTransNorm.x * fDepth);
			pvData[j].y = vTransPos.y + (vTransNorm.y * fDepth);
			pvData[j].z = vTransPos.z + (vTransNorm.z * fDepth);

			pvData[j].nx = vTransNorm.x;
			pvData[j].ny = vTransNorm.y;
			pvData[j].nz = vTransNorm.z;

			pvData[j].tu = pUV[0];
			pvData[j].tv = pUV[1];
		}

		glBindBuffer(GL_ARRAY_BUFFER, m_uiShellVbo[i]);
		unsigned int uiSize = pMesh->nNumVertex * sizeof(SVertex);
		glBufferData(GL_ARRAY_BUFFER, uiSize, pvData, GL_STATIC_DRAW);
		glBindBuffer(GL_ARRAY_BUFFER, 0);
	}

	delete[] pvData;
}