/*!****************************************************************************
 @Function		RenderSceneWithEffect
 @Return		bool		true if no error occurred
 @Description	Renders the whole scene with a single effect.
******************************************************************************/
bool OGLES3ShadowMapping::RenderSceneWithEffect(const int uiEffectId, const PVRTMat4 &mProjection, const PVRTMat4 &mView)
{
	CPVRTPFXEffect *pEffect = m_ppPFXEffects[uiEffectId];

	// Activate the passed effect
	pEffect->Activate();

	for (unsigned int i = 0; i < m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];
		SPODMaterial *pMaterial = 0;
		if (pNode->nIdxMaterial != -1)
		{
			pMaterial = &m_Scene.pMaterial[pNode->nIdxMaterial];

			// Bind the texture if there is one bound to this object
			if (pMaterial->nIdxTexDiffuse != -1)
			{
				CPVRTString texname = CPVRTString(m_Scene.pTexture[pMaterial->nIdxTexDiffuse].pszName).substitute(".png", "");
				CPVRTStringHash hashedName(texname);
				if (m_TextureCache.Exists(hashedName))
					glBindTexture(GL_TEXTURE_2D, m_TextureCache[hashedName]);
			}
		}

		glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[i]);
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[i]);

		// Pre-calculate commonly used matrices
		PVRTMat4 mWorld;
		m_Scene.GetWorldMatrix(mWorld, *pNode);
		PVRTMat4 mWorldView = mView * mWorld;

		// Bind semantics
		const CPVRTArray<SPVRTPFXUniform>& Uniforms = pEffect->GetUniformArray();
		for (unsigned int j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch (Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsNORMAL:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsUV:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsMATERIALCOLORDIFFUSE:
				{
					if (pMaterial)
						glUniform4f(Uniforms[j].nLocation, pMaterial->pfMatDiffuse[0], pMaterial->pfMatDiffuse[1], pMaterial->pfMatDiffuse[2], 1.0f);
				}
				break;
			case ePVRTPFX_UsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWorldViewProj = mProjection * mWorldView;
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldViewProj.f);
				}
				break;
			case ePVRTPFX_UsWORLDI:
				{
					PVRTMat3 mWorldI3x3(mWorld.inverse());
					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldI3x3.f);
				}
				break;
			case ePVRTPFX_UsWORLDVIEWIT:
				{
					PVRTMat3 mWorldViewIT3x3(mWorldView.inverse().transpose());
					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldViewIT3x3.f);
				}
				break;
			case ePVRTPFX_UsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
				}
				break;
			case ePVRTPFX_UsLIGHTPOSWORLD:
				{
					glUniform3fv(Uniforms[j].nLocation, 1, m_vLightPosition.ptr());
				}
				break;
			case eCUSTOMSEMANTIC_SHADOWTRANSMATRIX:
				{
					/*
						We need to calculate the shadow texture projection matrix. This matrix
						takes vertices from model space into the light's projection space, where
						we can look up values from our previously rendered depth buffer. It is
						constructed from the bias matrix and the light view and projection
						matrices used for the previous render pass, multiplied by the node's
						world matrix.
					*/
					//PVRTMat4 mTextureMatrix = m_mBiasMatrix * m_mLightProjection * m_mLightView * mView.inverse();
					PVRTMat4 mTextureMatrix = m_mBiasMatrix * m_mLightProjection * m_mLightView * mWorld;
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mTextureMatrix.f);
				}
				break;
			case ePVRTPFX_UsRANDOM:
				{
					// The depth bias value is exposed through the RANDOM semantic
					glUniform1f(Uniforms[j].nLocation, m_fBias);
				}
				break;
			default:
				{
					PVRShellOutputDebug("Error: Unhandled semantic in RenderSceneWithEffect()\n");
					return false;
				}
			}
		}

		// Now that all uniforms are set and the materials ready, draw the mesh.
		glDrawElements(GL_TRIANGLES, pMesh->nNumFaces * 3, GL_UNSIGNED_SHORT, 0);

		// Disable all vertex attributes
		for (unsigned int j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch (Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
			case ePVRTPFX_UsNORMAL:
			case ePVRTPFX_UsUV:
				glDisableVertexAttribArray(Uniforms[j].nLocation);
				break;
			}
		}
	}

	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

	return true;
}
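/*
	Note: the m_mBiasMatrix used above remaps the light's clip-space range of
	[-1, 1] into the [0, 1] range required for a shadow-map texture lookup.
	A minimal sketch of how such a bias matrix is commonly built follows; this
	is an assumption for illustration, as the training course may construct it
	elsewhere.
*/
PVRTMat4 BuildShadowBiasMatrix()
{
	// Start from identity. PVRTMat4 stores its data column-major, matching
	// OpenGL, so the translation lives in elements f[12..14].
	PVRTMat4 mBias = PVRTMat4::Identity();
	mBias.f[0] = mBias.f[5] = mBias.f[10] = 0.5f;	// scale x, y and z by 0.5
	mBias.f[12] = mBias.f[13] = mBias.f[14] = 0.5f;	// then offset by 0.5
	return mBias;
}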
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame. eglSwapBuffers() will be
				performed by PVRShell automatically. PVRShell will also manage
				relevant OS events. The user has access to these events through
				an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLESIntroducingPFX::RenderScene()
{
	// Clears the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use the loaded effect
	m_pEffect->Activate();

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	int iTime = PVRShellGetTime();
	int iDeltaTime = iTime - m_iTimePrev;
	m_iTimePrev = iTime;
	m_fFrame += (float)iDeltaTime * DEMO_FRAME_RATE;
	if (m_fFrame > m_Scene.nNumFrame - 1)
		m_fFrame = 0;

	// Sets the scene animation to this frame
	m_Scene.SetFrame(m_fFrame);

	{
		PVRTVec3 vFrom, vTo, vUp;
		VERTTYPE fFOV;
		vUp.x = 0.0f;
		vUp.y = 1.0f;
		vUp.z = 0.0f;

		// We can get the camera position, target and field of view (fov) with GetCameraPos()
		fFOV = m_Scene.GetCameraPos(vFrom, vTo, 0) * 0.4f;

		/*
			We can build the view matrix from the camera position, target and an
			up vector. For this we use PVRTMat4::LookAtRH().
		*/
		m_mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

		// Calculates the projection matrix
		bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
		m_mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth) / (float)PVRShellGet(prefHeight), CAM_NEAR, CAM_FAR, PVRTMat4::OGL, bRotate);
	}

	/*
		A scene is composed of nodes. There are 3 types of nodes:
		- MeshNodes: reference a mesh in pMesh[]. These nodes are at the beginning
		  of the pNode[] array, and there are nNumMeshNode of them. This way the
		  .pod format can instantiate the same mesh several times with different
		  attributes.
		- lights
		- cameras
		To draw a scene, you must go through all the MeshNodes and draw the
		referenced meshes.
	*/
	for (int i = 0; i < (int)m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];

		// Gets the mesh referenced by the node
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];

		glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);

		// Gets the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(*pNode);

		PVRTMat4 mWorldView;
		mWorldView = m_mView * mWorld;

		for (unsigned int j = 0; j < m_nUniformCnt; ++j)
		{
			switch (m_psUniforms[j].nSemantic)
			{
			case eUsPOSITION:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsNORMAL:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsUV:
				{
					glVertexAttribPointer(m_psUniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(m_psUniforms[j].nLocation);
				}
				break;
			case eUsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWVP;

					// Passes the world-view-projection matrix (WVP) to the shader to transform the vertices
					mWVP = m_mProjection * mWorldView;
					glUniformMatrix4fv(m_psUniforms[j].nLocation, 1, GL_FALSE, mWVP.f);
				}
				break;
			case eUsWORLDVIEWIT:
				{
					PVRTMat4 mWorldViewI, mWorldViewIT;

					// Passes the inverse transpose of the world-view matrix to the shader to transform the normals
					mWorldViewI = mWorldView.inverse();
					mWorldViewIT = mWorldViewI.transpose();

					PVRTMat3 WorldViewIT = PVRTMat3(mWorldViewIT);

					glUniformMatrix3fv(m_psUniforms[j].nLocation, 1, GL_FALSE, WorldViewIT.f);
				}
				break;
			case eUsLIGHTDIREYE:
				{
					// Reads the light direction from the scene.
					PVRTVec4 vLightDirection;
					vLightDirection = m_Scene.GetLightDirection(0);
					vLightDirection.x = -vLightDirection.x;
					vLightDirection.y = -vLightDirection.y;
					vLightDirection.z = -vLightDirection.z;

					/*
						Sets the w component to 0 so that, when the direction is
						transformed by the view matrix below, no translation is
						applied: it is treated as a direction rather than a position.
					*/
					vLightDirection.w = 0;

					// Passes the light direction in eye space to the shader
					PVRTVec4 vLightDirectionEyeSpace;
					vLightDirectionEyeSpace = m_mView * vLightDirection;
					glUniform3f(m_psUniforms[j].nLocation, vLightDirectionEyeSpace.x, vLightDirectionEyeSpace.y, vLightDirectionEyeSpace.z);
				}
				break;
			case eUsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(m_psUniforms[j].nLocation, m_psUniforms[j].nIdx);
				}
				break;
			}
		}

		/*
			Now that the uniforms are set and the materials ready, call another
			function to actually draw the mesh.
		*/
		DrawMesh(pMesh);

		glBindBuffer(GL_ARRAY_BUFFER, 0);

		// Disable the vertex attribute arrays that were enabled above
		for (unsigned int j = 0; j < m_nUniformCnt; ++j)
		{
			switch (m_psUniforms[j].nSemantic)
			{
			case eUsPOSITION:
			case eUsNORMAL:
			case eUsUV:
				glDisableVertexAttribArray(m_psUniforms[j].nLocation);
				break;
			}
		}
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("IntroducingPFX", "", ePVRTPrint3DLogoIMG);
	m_Print3D.Flush();

	return true;
}
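/*
	DrawMesh() is not shown in this listing. A minimal sketch of what such a
	helper could look like follows; this is an assumption for illustration, as
	the actual training-course implementation may differ (for example, it may
	also handle triangle strips).
*/
void OGLESIntroducingPFX::DrawMesh(SPODMesh* pMesh)
{
	/*
		Draw the mesh as an indexed triangle list: each face contributes three
		16-bit indices. The index data is read from client memory
		(pMesh->sFaces.pData), assuming only the vertex data was uploaded to a
		VBO above.
	*/
	glDrawElements(GL_TRIANGLES, pMesh->nNumFaces * 3, GL_UNSIGNED_SHORT, pMesh->sFaces.pData);
}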
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame. eglSwapBuffers() will be
				performed by PVRShell automatically. PVRShell will also manage
				relevant OS events. The user has access to these events through
				an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2MaximumIntensityBlend::RenderScene()
{
	// Clears the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Enable maximum-intensity blending
	glEnable(GL_BLEND);
	glBlendEquation(GL_MAX_EXT);

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	unsigned long ulTime = PVRShellGetTime();
	unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
	m_ulTimePrev = ulTime;
	m_fFrame += (float)ulDeltaTime * DEMO_FRAME_RATE;
	if (m_fFrame > m_Scene.nNumFrame - 1)
		m_fFrame = 0;

	// Sets the scene animation to this frame
	m_Scene.SetFrame(m_fFrame);

	PVRTVec3 vLightDir;
	{
		PVRTVec3 vFrom, vTo, vUp;
		VERTTYPE fFOV;
		vUp.x = 0.0f;
		vUp.y = 1.0f;
		vUp.z = 0.0f;

		// We can get the camera position, target and field of view (fov) with GetCameraPos()
		fFOV = m_Scene.GetCameraPos(vFrom, vTo, 0) * 0.6f;

		/*
			We can build the view matrix from the camera position, target and an
			up vector. For this we use PVRTMat4::LookAtRH().
		*/
		m_mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

		vLightDir = vFrom;

		// Calculates the projection matrix
		bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
		m_mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth) / (float)PVRShellGet(prefHeight), CAM_NEAR, CAM_FAR, PVRTMat4::OGL, bRotate);
	}

	/*
		A scene is composed of nodes. There are 3 types of nodes:
		- MeshNodes: reference a mesh in pMesh[]. These nodes are at the beginning
		  of the pNode[] array, and there are nNumMeshNode of them. This way the
		  .pod format can instantiate the same mesh several times with different
		  attributes.
		- lights
		- cameras
		To draw a scene, you must go through all the MeshNodes and draw the
		referenced meshes.
	*/
	for (int i = 0; i < (int)m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];

		// Gets the mesh referenced by the node
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];

		glBindBuffer(GL_ARRAY_BUFFER, m_aiVboID[i]);

		// Gets the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(*pNode);

		PVRTMat4 mWorldView;
		mWorldView = m_mView * mWorld;

		// Select the effect: use the textured variant if the material has a diffuse texture
		CPVRTPFXEffect* pEffect;
		SPODMaterial* pMat = &m_Scene.pMaterial[pNode->nIdxMaterial];
		if (pMat->nIdxTexDiffuse != -1)
			pEffect = m_pEffectTextured;
		else
			pEffect = m_pEffect;

		pEffect->Activate();

		// Retrieve the list of required uniforms
		const CPVRTArray<SPVRTPFXUniform>& aUniforms = pEffect->GetUniformArray();

		/*
			Now we loop over the uniforms requested by the PFX file. Using the
			switch statement allows us to handle all of the required semantics.
		*/
		for (unsigned int j = 0; j < aUniforms.GetSize(); ++j)
		{
			switch (aUniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
				{
					glVertexAttribPointer(aUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(aUniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsNORMAL:
				{
					glVertexAttribPointer(aUniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(aUniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsUV:
				{
					glVertexAttribPointer(aUniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(aUniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWVP;

					// Passes the world-view-projection matrix (WVP) to the shader to transform the vertices
					mWVP = m_mProjection * mWorldView;
					glUniformMatrix4fv(aUniforms[j].nLocation, 1, GL_FALSE, mWVP.f);
				}
				break;
			case eUsINTENSITY:
				{
					// Take the R channel of the material's diffuse color as the intensity value
					float fIntensity = pMat->pfMatDiffuse[0];
					glUniform1f(aUniforms[j].nLocation, fIntensity);
				}
				break;
			case ePVRTPFX_UsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(aUniforms[j].nLocation, 0);
				}
				break;
			case ePVRTPFX_UsWORLDVIEWIT:
				{
					PVRTMat3 mWorldViewIT3x3(mWorldView.inverse().transpose());
					glUniformMatrix3fv(aUniforms[j].nLocation, 1, GL_FALSE, mWorldViewIT3x3.f);
				}
				break;
			case ePVRTPFX_UsLIGHTDIREYE:
				{
					PVRTVec4 vLightDirView = (m_mView * PVRTVec4(-vLightDir, 1.0f)).normalize();
					glUniform3fv(aUniforms[j].nLocation, 1, vLightDirView.ptr());
				}
				break;
			}
		}

		/*
			Now that the uniforms are set and the materials ready, call another
			function to actually draw the mesh.
		*/
		DrawMesh(pMesh);

		glBindBuffer(GL_ARRAY_BUFFER, 0);

		/*
			Now disable all of the enabled attribute arrays that the PFX requested.
		*/
		for (unsigned int j = 0; j < aUniforms.GetSize(); ++j)
		{
			switch (aUniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
			case ePVRTPFX_UsNORMAL:
			case ePVRTPFX_UsUV:
				glDisableVertexAttribArray(aUniforms[j].nLocation);
				break;
			}
		}
	}

	// Reset blending to its default state
	glBlendEquation(GL_FUNC_ADD);
	glDisable(GL_BLEND);

	/*
		Determine which title to show. The default title is quite long, so we
		display a shortened version if it cannot fit on the screen.
	*/
	const char* pszTitle = NULL;
	{
		bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);

		float fW, fH;
		m_Print3D.MeasureText(&fW, &fH, 1.0f, c_pszTitle);

		int iScreenW = bRotate ? PVRShellGet(prefHeight) : PVRShellGet(prefWidth);
		if ((int)fW >= iScreenW)
			pszTitle = c_pszTitleShort;
		else
			pszTitle = c_pszTitle;
	}

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle(pszTitle, "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
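/*
	Note: with glBlendEquation(GL_MAX_EXT) the blend factors are ignored and each
	framebuffer channel receives max(source, destination). GL_MAX_EXT comes from
	the EXT_blend_minmax extension, which is not part of core OpenGL ES 2.0, so
	its availability should be confirmed at startup. A minimal sketch of such a
	check follows; the helper name is hypothetical and not part of the training
	course.
*/
#include <string.h>

static bool IsMaxBlendSupported()
{
	// Query the extension string and look for EXT_blend_minmax
	const char* pszExtensions = (const char*)glGetString(GL_EXTENSIONS);
	return pszExtensions && strstr(pszExtensions, "GL_EXT_blend_minmax") != NULL;
}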