/*!**************************************************************************** @Function DrawBalloons @Input psProgram Program to use mProjection Projection matrix to use mView View matrix to use pmModels A pointer to an array of model matrices iNum Number of balloons to draw @Description Draws balloons. ******************************************************************************/ void OGLES2Glass::DrawBalloons(Program* psProgram, PVRTMat4 mProjection, PVRTMat4 mView, PVRTMat4* pmModels, int iNum) { // Use shader program glUseProgram(psProgram->uiId); // Bind texture glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, m_uiBalloonTex); PVRTMat4 mModelView, mMVP; for (int i = 0; i < iNum; ++i) { mModelView = mView * pmModels[i]; mMVP = mProjection * mModelView; glUniformMatrix4fv(psProgram->auiLoc[eMVMatrix], 1, GL_FALSE, mModelView.ptr()); glUniformMatrix4fv(psProgram->auiLoc[eMVPMatrix], 1, GL_FALSE, mMVP.ptr()); // Calculate and set the model space light direction PVRTVec3 vLightDir = pmModels[i].inverse() * PVRTVec4(19, 22, -50, 0); vLightDir = vLightDir.normalize(); glUniform3fv(psProgram->auiLoc[eLightDir], 1, vLightDir.ptr()); // Calculate and set the model space eye position PVRTVec3 vEyePos = mModelView.inverse() * PVRTVec4(0.0f, 0.0f, 0.0f, 1.0f); glUniform3fv(psProgram->auiLoc[eEyePos], 1, vEyePos.ptr()); // Now that the uniforms are set, call another function to actually draw the mesh. DrawMesh(0, &m_Balloon, &m_puiBalloonVbo, &m_puiBalloonIndexVbo, 3); } }
// Per-frame update for a bullet in flight: integrates its motion, then
// transitions to state_hit, state_missed, or stays in flight.
void BulletNode::FlyState()
{
	// Advance along the firing direction, then apply gravity and drag.
	mBulletPos += mDir * mSpeed;
	mBulletPos.y -= mGravity;
	mSpeed -= mFriction;

	// A hit flagged elsewhere takes priority over every other outcome.
	if (mHit)
	{
		mState = state_hit;
		return;
	}

	// The bullet has missed once it stops or drops below y == 0.
	if (mSpeed < 0.0f || mBulletPos.y < 0.0f)
	{
		mState = state_missed;
	}
	else
	{
		// Otherwise, it also misses once it travels beyond its range.
		PVRTVec3 fromStart = mBulletPos - mBulletStartingPos;
		// NOTE(review): lenSqr() is a SQUARED length, but it is compared
		// against MM(mRange); unless MM() yields a squared value this mixes
		// units — verify MM()'s definition.
		float fromStartDist = fromStart.lenSqr();
		if (fromStartDist > MM(mRange))
		{
			mState = state_missed;
		}
	}
}
/*!**************************************************************************** @Function DrawSkybox @Description Draws the skybox onto the screen. ******************************************************************************/ void OGLES2Glass::DrawSkybox() { glUseProgram(m_SkyboxProgram.uiId); PVRTMat4 mVP = m_mProjection * m_mView; PVRTMat4 mInvVP = mVP.inverseEx(); glUniformMatrix4fv(m_SkyboxProgram.auiLoc[eInvVPMatrix], 1, GL_FALSE, mInvVP.ptr()); PVRTVec3 vEyePos = m_mView.inverse() * PVRTVec4(0, 0, 0, 1); glUniform3fv(m_SkyboxProgram.auiLoc[eEyePos], 1, vEyePos.ptr()); glBindBuffer(GL_ARRAY_BUFFER, m_uiSquareVbo); glEnableVertexAttribArray(VERTEX_ARRAY); glVertexAttribPointer(VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 3, 0); glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_CUBE_MAP, m_uiCubeTex); glDrawArrays(GL_TRIANGLES, 0, 6); glDisableVertexAttribArray(VERTEX_ARRAY); glBindBuffer(GL_ARRAY_BUFFER, 0); }
/*!**************************************************************************** @Function RenderScene @Return bool true if no error occured @Description Main rendering loop function of the program. The shell will call this function every frame. eglSwapBuffers() will be performed by PVRShell automatically. PVRShell will also manage important OS events. Will also manage relevent OS events. The user has access to these events through an abstraction layer provided by PVRShell. ******************************************************************************/ bool OGLES2Fog::RenderScene() { // Clear the color and depth buffer glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Keyboard input (cursor to change fog function) if (PVRShellIsKeyPressed(PVRShellKeyNameLEFT)) { m_eFogMode = EFogMode((m_eFogMode + eNumFogModes - 1) % eNumFogModes); } if (PVRShellIsKeyPressed(PVRShellKeyNameRIGHT)) { m_eFogMode = EFogMode((m_eFogMode + 1) % eNumFogModes); } // Use the loaded shader program glUseProgram(m_ShaderProgram.uiId); // Bind texture glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, m_uiTexture); // Set uniforms glUniform1i(m_ShaderProgram.uiFogFuncLoc, m_eFogMode); // Rotate and translate the model matrix PVRTMat4 mModel = PVRTMat4::RotationY(m_fAngleY); m_fAngleY += PVRT_PI / 90; mModel.preTranslate(0, 0, 500 * cos(m_fPositionZ) - 450); m_fPositionZ += (2*PVRT_PI)*0.0008f; // Feed Projection and Model View matrices to the shaders PVRTMat4 mModelView = m_mView * mModel; PVRTMat4 mMVP = m_mProjection * mModelView; glUniformMatrix4fv(m_ShaderProgram.uiModelViewLoc, 1, GL_FALSE, mModelView.ptr()); glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr()); // Pass the light direction transformed with the inverse of the ModelView matrix // This saves the transformation of the normals per vertex. A simple dot3 between this direction // and the un-transformed normal will allow proper smooth shading. 
PVRTVec3 vMsLightDir = (PVRTMat3(mModel).inverse() * PVRTVec3(1, 1, 1)).normalized(); glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, vMsLightDir.ptr()); /* Now that the model-view matrix is set and the materials ready, call another function to actually draw the mesh. */ DrawMesh(0); // Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools m_Print3D.DisplayDefaultTitle("Fog", "", ePVRTPrint3DLogoIMG); m_Print3D.Print3D(0.3f, 7.5f, 0.75f, PVRTRGBA(255,255,255,255), "Fog Mode: %s", g_FogFunctionList[m_eFogMode]); m_Print3D.Flush(); return true; }
// --------------------------------------------------------------- void MyPVRDemo::RenderStatue(const PVRTMat4& mxModel, const PVRTMat4& mxCam, const PVRTVec3& vLightPos, const StatueShader* pShader) { PVRTMat4 mxModelView = mxCam * mxModel; PVRTMat4 mxMVP = m_mxProjection * mxModelView; PVRTVec3 vLightPosModel = vLightPos; // Light position in World space glUniform3fv(pShader->uiLightPos, 1, vLightPosModel.ptr()); glUniformMatrix4fv(pShader->uiMVP, 1, GL_FALSE, mxMVP.ptr()); glUniformMatrix4fv(pShader->uiModelView, 1, GL_FALSE, mxModelView.ptr()); DrawMesh(enumMODEL_Statue, FLAG_VRT | FLAG_TEX0 | FLAG_NRM | FLAG_TAN); }
/*!****************************************************************************
 @Function		ReleaseView
 @Return		bool		true if no error occured
 @Description	Code in ReleaseView() will be called by PVRShell when the
				application quits or before a change in the rendering context.
******************************************************************************/
bool OGLES3EdgeDetection::ReleaseView()
{
	// Delete the color texture
	glDeleteTextures(1, &m_uiColorTexture);

	// Delete the depth render buffer
	glDeleteRenderbuffers(1, &m_uiDepthRenderbuffer);

	// delete shader program , and shaders
	glDeleteProgram(m_PreShader.uiId);
	glDeleteShader(m_uiPreVertShader);
	glDeleteShader(m_uiPreFragShader);
	// Release the program/vertex/fragment trio for every post-process mode.
	for (int i=0; i<eNumPostShaders; ++i)
	{
		glDeleteProgram(m_PostShaders[i].uiId);
		glDeleteShader(m_uiPostVertShaders[i]);
		glDeleteShader(m_uiPostFragShaders[i]);
	}

	// Delete the stored color data.
	// NOTE(review): this frees through m_pvColorData->ptr() (a pointer to the
	// first component of the first element) rather than through m_pvColorData
	// itself; this is only well-defined if the allocation was made with the
	// matching element type — verify against the allocation site.
	delete [] m_pvColorData->ptr();
	m_pvColorData=NULL;

	// Release Print3D Textures
	m_Print3D.ReleaseTextures();

	// Delete frame buffer objects
	glDeleteFramebuffers(1, &m_uiFramebufferObject);

	return true;
}
void SimpleCamera::updatePosition() { // Most of this stuff is to try and smooth movement when controlled by the primitive keyboard input available PVRTVec3 vDec = m_vVelocity * f2vt(TimeController::inst().getDeltaTime()) * m_fMoveSpeed * f2vt(0.1f); while(vDec.lenSqr()>m_vVelocity.lenSqr()) { vDec /= f2vt(2.0f); } m_vVelocity -= vDec; if(m_vVelocity.lenSqr()>m_fMoveSpeed*m_fMoveSpeed) { m_vVelocity = m_vVelocity.normalized()*m_fMoveSpeed; } m_vPosition += m_vVelocity * f2vt((float)TimeController::inst().getDeltaTime()); }
/*!**************************************************************************** @Function RenderScene @Return bool true if no error occured @Description Main rendering loop function of the program. The shell will call this function every frame. eglSwapBuffers() will be performed by PVRShell automatically. PVRShell will also manage important OS events. Will also manage relevent OS events. The user has access to these events through an abstraction layer provided by PVRShell. ******************************************************************************/ bool OGLES2FastTnL::RenderScene() { // Clear the color and depth buffer glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Use shader program glUseProgram(m_ShaderProgram.uiId); // Bind texture glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, m_uiTexture); /* Now that the uniforms are set, call another function to actually draw the mesh. */ DrawMesh(0); // Rotate the model matrix PVRTMat4 mModel = PVRTMat4::RotationY(m_fAngleY); m_fAngleY += 0.02f; // Calculate model view projection matrix PVRTMat4 mMVP = m_mViewProj * mModel; // Feeds Projection Model View matrix to the shaders glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr()); /* The inverse of a rotation matrix is the transposed matrix Because of v * M = transpose(M) * v, this means: v * R == inverse(R) * v So we don't have to actually invert or transpose the matrix to transform back from world space to model space */ PVRTVec3 vMsLightDir = (PVRTVec3(1, 1, 1) * PVRTMat3(mModel)).normalized(); glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, vMsLightDir.ptr()); // Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools m_Print3D.DisplayDefaultTitle("FastTnL", "", ePVRTPrint3DLogoIMG); m_Print3D.Flush(); return true; }
/*!**************************************************************************** @Function RenderScene @Return bool true if no error occured @Description Main rendering loop function of the program. The shell will call this function every frame. eglSwapBuffers() will be performed by PVRShell automatically. PVRShell will also manage important OS events. Will also manage relevent OS events. The user has access to these events through an abstraction layer provided by PVRShell. ******************************************************************************/ bool OGLES3CellShading::RenderScene() { // Clears the color and depth buffer glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Use the loaded shader program glUseProgram(m_ShaderProgram.uiId); // Bind textures glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, m_uiShadingTex); // Calculate the model matrix PVRTMat4 mModel = PVRTMat4::RotationY(m_fAngleY); m_fAngleY += PVRT_PI / 210; // Set model view projection matrix PVRTMat4 mMVP = m_mViewProj * mModel; glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr()); // Set eye position in model space PVRTVec4 vMsEyePos = PVRTVec4(0, 0, 125, 1) * mModel; glUniform3fv(m_ShaderProgram.uiEyePosLoc, 1, vMsEyePos.ptr()); // transform directional light from world space to model space PVRTVec3 vMsLightDir = PVRTVec3(PVRTVec4(1, 2, 1, 0) * mModel).normalized(); glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, vMsLightDir.ptr()); DrawMesh(0); // Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools m_Print3D.DisplayDefaultTitle("CellShading", "", ePVRTPrint3DSDKLogo); m_Print3D.Flush(); return true; }
// Per-frame scene update: culls quad-tree managed nodes and the flat root
// node list against a 2D view triangle on the XZ plane (and optionally an
// occlusion radius), collecting nodes that pass into mToApply[] (count in
// mToApplyCount) and updating them.
void SceneManager::Update()
{
	RenderLayerManager & renderManager = RenderLayerManager::GetRenderLayerManager();
	const PVRTVec3 center = renderManager.GetCenter();
	float occlusionRadius = renderManager.GetOcclusionRadius();

	// Build the 2D view triangle (A, B, C) on the XZ plane: A is the camera
	// translation taken straight from the look matrix; B and C are the two
	// far frustum corners transformed by the look matrix.
	PVRTVec4 vecA( mLookMtx->f[12], 0.0f, mLookMtx->f[14], 1);
	PVRTVec4 vecB( GLOBAL_SCALE * FRUSTUM_W, 0.0f, GLOBAL_SCALE * FRUSTUM_D, 1);
	PVRTVec4 vecC( GLOBAL_SCALE * -FRUSTUM_W, 0.0f, GLOBAL_SCALE * FRUSTUM_D, 1);
	vecB = *mLookMtx * vecB;
	vecC = *mLookMtx * vecC;
	PVRTVec2 A(vecA.x, vecA.z);
	PVRTVec2 B(vecB.x, vecB.z);
	PVRTVec2 C(vecC.x, vecC.z);

	mToApplyCount = 0;

	if (mQuadTree)
	{
		// Fixed-capacity scratch array of candidate quads (static: reused each frame).
		static QuadNode * quadNodes[256]={0};
		int quadNodeCount = 0;
		//mQuadTree->GetQuads(center.x, center.z, occlusionRadius, quadNodes, quadNodeCount);
		mQuadTree->GetQuadsCameraFrustum(quadNodes, quadNodeCount, mLookMtx);
		quadNodeCount--;

		bool useFrustumCulling = true; //!!!!!!!!!!!!!!!!!!!!!

		// Walk candidate quads from last to first and cull each contained node.
		for (int quad = quadNodeCount ; quad >=0 ; quad--)
		{
			QuadNode * pQuadNode = quadNodes[quad];
			List & dataList = pQuadNode->GetDataList();
			ListIterator listIter(dataList);
			while( Node * pRootNode = (Node*)listIter.GetPtr() )
			{
				if (!pRootNode->IsVisible())
					continue;
				//pRootNode->UpdateWithoutChildren();
				bool useOcclusionRadius = pRootNode->GetUseOcclusionCulling();
				PVRTVec3 worldPos = pRootNode->GetWorldTranslation();

				if (!useFrustumCulling && useOcclusionRadius)
				{
					// Distance-only culling against the occlusion radius.
					// NOTE(review): lenSqr() is squared but compared with
					// MM(occlusionRadius) — verify MM() yields a squared value.
					PVRTVec3 distVec = worldPos - center;
					if ( distVec.lenSqr() < MM(occlusionRadius) )
					{
						pRootNode->SetInFrustum(true);
						pRootNode->Update();
						mToApply[mToApplyCount] = pRootNode;
						mToApplyCount++;
					}
					else
					{
						pRootNode->SetInFrustum(false);
					}
				}
				else if (useFrustumCulling)
				{
					// Barycentric point-in-triangle test of the node's XZ
					// position P against the view triangle ABC.
					PVRTVec2 P(worldPos.x, worldPos.z);
					PVRTVec2 v0 = C - A;
					PVRTVec2 v1 = B - A;
					PVRTVec2 v2 = P - A;

					// Compute dot products
					float dot00 = v0.dot(v0);
					float dot01 = v0.dot(v1);
					float dot02 = v0.dot(v2);
					float dot11 = v1.dot(v1);
					float dot12 = v1.dot(v2);

					// Compute barycentric coordinates
					float invDenom = 1.0f / (dot00 * dot11 - dot01 * dot01);
					float u = (dot11 * dot02 - dot01 * dot12) * invDenom;
					float v = (dot00 * dot12 - dot01 * dot02) * invDenom;

					bool addToList = false;
					// Check if point is in triangle
					//PVRTVec3 distVec = worldPos - center;
					//if ( distVec.lenSqr() < MM(occlusionRadius) )
					{
						// Accept when P is inside the triangle, or when the
						// node's bounding circle crosses edge AB or AC.
						if ( (u > 0) && (v > 0) && (u + v < 1))
						{
							addToList = true;
						}
						else if ( Collision::CircleTriangleEdgeIntersection(A,B,P, pRootNode->GetRadius() ) )
						{
							addToList = true;
						}
						else if ( Collision::CircleTriangleEdgeIntersection(A,C,P, pRootNode->GetRadius() ))
						{
							addToList = true;
						}

						if (addToList)
						{
							pRootNode->SetInFrustum(true);
							//pRootNode->Update();
							mToApply[mToApplyCount] = pRootNode;
							mToApplyCount++;
						}
						else
						{
							pRootNode->SetInFrustum(false);
						}
					}
					//else
					//{
					//	pRootNode->SetInFrustum(false);
					//}
				}
				else
				{
					// Culling disabled for this node: always accepted.
					pRootNode->SetInFrustum(true);
					//pRootNode->Update();
					mToApply[mToApplyCount] = pRootNode;
					mToApplyCount++;
				}
			}
		}
	}

	// Second pass: the flat list of root nodes.
	for (int n=0;n<mNodeCount;n++)
	{
		Node * pRootNode = mRootNodes[n];
		if (!pRootNode->IsVisible())
			continue;
		pRootNode->UpdateWithoutChildren();
		bool useOcclusionRadius = pRootNode->GetUseOcclusionCulling();
		PVRTVec3 worldPos = pRootNode->GetWorldTranslation();
		PVRTVec3 distVec = worldPos - center;
		if (useOcclusionRadius)
		{
			// Coarse distance rejection first, then the same triangle test as above.
			if ( distVec.lenSqr() < MM(occlusionRadius) )
			{
				PVRTVec2 P(worldPos.x, worldPos.z);
				PVRTVec2 v0 = C - A;
				PVRTVec2 v1 = B - A;
				PVRTVec2 v2 = P - A;

				// Compute dot products
				float dot00 = v0.dot(v0);
				float dot01 = v0.dot(v1);
				float dot02 = v0.dot(v2);
				float dot11 = v1.dot(v1);
				float dot12 = v1.dot(v2);

				// Compute barycentric coordinates
				float invDenom = 1.0f / (dot00 * dot11 - dot01 * dot01);
				float u = (dot11 * dot02 - dot01 * dot12) * invDenom;
				float v = (dot00 * dot12 - dot01 * dot02) * invDenom;

				bool addToList = false;
				// Check if point is in triangle
				//PVRTVec3 distVec = worldPos - center;
				//if ( distVec.lenSqr() < MM(occlusionRadius) )
				{
					if ( (u > 0) && (v > 0) && (u + v < 1))
					{
						addToList = true;
					}
					else if ( Collision::CircleTriangleEdgeIntersection(A,B,P, pRootNode->GetRadius() ) )
					{
						addToList = true;
					}
					else if ( Collision::CircleTriangleEdgeIntersection(A,C,P, pRootNode->GetRadius() ))
					{
						addToList = true;
					}

					if (addToList)
					{
						pRootNode->SetInFrustum(true);
						pRootNode->Update();
						mToApply[mToApplyCount] = pRootNode;
						mToApplyCount++;
					}
					else
					{
						pRootNode->SetInFrustum(false);
					}
				}
				/*
				pRootNode->SetInFrustum(true);
				pRootNode->Update();
				mToApply[mToApplyCount] = pRootNode;
				mToApplyCount++;
				*/
			}
			else
			{
				pRootNode->SetInFrustum(false);
			}
		}
		else
		{
			pRootNode->SetInFrustum(true);
			pRootNode->Update();
			mToApply[mToApplyCount] = pRootNode;
			mToApplyCount++;
		}
		/*
		PVRTVec3 worldPos = pRootNode->GetWorldTranslation();
		PVRTVec3 distVec = worldPos - center;
		if (!pRootNode->GetUseOcclusionCulling())
		{
			pRootNode->SetInFrustum(true);
		}
		else if ( distVec.lenSqr() < occlusionRadius )
		{
			pRootNode->SetInFrustum(true);
		}
		else
		{
			pRootNode->SetInFrustum(false);
		}
		*/
	}
}
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occured
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				The user has access to these events through an abstraction
				layer provided by PVRShell.
******************************************************************************/
bool OGLES2AnisotropicLighting::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Keyboard input (cursor to change render mode)
	if (PVRShellIsKeyPressed(PVRShellKeyNameLEFT))
	{
		m_eRenderMode = ERenderMode((m_eRenderMode + eNumRenderModes - 1) % eNumRenderModes);
	}
	if (PVRShellIsKeyPressed(PVRShellKeyNameRIGHT))
	{
		m_eRenderMode = ERenderMode((m_eRenderMode + 1) % eNumRenderModes);
	}

	// Rotate the model matrix
	PVRTMat4 mModel = PVRTMat4::RotationY(m_fAngleY);
	m_fAngleY += 0.02f;

	// Calculate model view projection matrix
	PVRTMat4 mMVP = m_mViewProj * mModel;

	if (m_eRenderMode == eTexLookup)
	{
		// "Fast" path: uses m_FastShader with a texture bound on unit 0.
		glActiveTexture(GL_TEXTURE0);
		glBindTexture(GL_TEXTURE_2D, m_uiTexture);

		glUseProgram(m_FastShader.uiId);
		glUniformMatrix4fv(m_FastShader.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr());

		/*
			The inverse of a rotation matrix is the transposed matrix
			Because of v * M = transpose(M) * v, this means:
			v * R == inverse(R) * v
			So we don't have to actually invert or transpose the matrix
			to transform back from world space to model space
		*/
		// Eye position (world-space (0,0,150)) expressed in model space.
		PVRTVec3 vMsEyePos = PVRTVec3(PVRTVec4(0, 0, 150, 1) * mModel);
		glUniform3fv(m_FastShader.uiMsEyePosLoc, 1, vMsEyePos.ptr());
		// Light direction expressed in model space.
		PVRTVec3 vMsLightDir = PVRTVec3(PVRTVec4(1, 1, 1, 1) * mModel).normalized();
		glUniform3fv(m_FastShader.uiMsLightDirLoc, 1, vMsLightDir.ptr());
	}
	else
	{
		// "Slow" path: m_SlowShader; note it takes a normalised eye DIRECTION
		// rather than the eye position used by the fast path.
		glUseProgram(m_SlowShader.uiId);
		glUniformMatrix4fv(m_SlowShader.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.ptr());
		PVRTVec3 vMsEyeDir = PVRTVec3(PVRTVec4(0, 0, 150, 1) * mModel).normalized();
		glUniform3fv(m_SlowShader.uiMsEyeDirLoc, 1, vMsEyeDir.ptr());
		PVRTVec3 vMsLightDir = PVRTVec3(PVRTVec4(1, 1, 1, 1) * mModel).normalized();
		glUniform3fv(m_SlowShader.uiMsLightDirLoc, 1, vMsLightDir.ptr());
	}

	/*
		Now that the uniforms are set, call another function to actually draw the mesh.
	*/
	DrawMesh(0);

	// Displays the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("AnisotropicLighting", "", ePVRTPrint3DLogoIMG);
	m_Print3D.Print3D(0.3f, 7.5f, 0.75f, PVRTRGBA(255,255,255,255), c_aszRenderModes[m_eRenderMode]);
	m_Print3D.Flush();

	return true;
}
/*!****************************************************************************
 @Function		InitView
 @Return		bool		true if no error occured
 @Description	Code in InitView() will be called by PVRShell upon
				initialization or after a change in the rendering context.
				Used to initialize variables that are dependant on the rendering
				context (e.g. textures, vertex buffers, etc.)
******************************************************************************/
bool OGLES2AnisotropicLighting::InitView()
{
	CPVRTString ErrorStr;

	/*
		Initialize VBO data
	*/
	LoadVbos();

	/*
		Load textures
	*/
	if (!LoadTextures(&ErrorStr))
	{
		PVRShellSet(prefExitMessage, ErrorStr.c_str());
		return false;
	}

	/*
		Load and compile the shaders & link programs
	*/
	if (!LoadShaders(&ErrorStr))
	{
		PVRShellSet(prefExitMessage, ErrorStr.c_str());
		return false;
	}

	// Is the screen rotated?
	bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);

	/*
		Initialize Print3D
	*/
	if(m_Print3D.SetTextures(0,PVRShellGet(prefWidth),PVRShellGet(prefHeight), bRotate) != PVR_SUCCESS)
	{
		PVRShellSet(prefExitMessage, "ERROR: Cannot initialise Print3D\n");
		return false;
	}

	/*
		Calculate the projection and view matrices
	*/
	float fAspect = PVRShellGet(prefWidth) / (float)PVRShellGet(prefHeight);
	// Perspective matrix optimized for a floating-point depth buffer; it
	// pairs with the inverted depth test/clear set up further below.
	m_mViewProj = PVRTMat4::PerspectiveFovFloatDepthRH(CAM_FOV, fAspect, CAM_NEAR, PVRTMat4::OGL, bRotate);
	m_mViewProj *= PVRTMat4::LookAtRH(PVRTVec3(0.f, 0.f, 150.f), PVRTVec3(0.f), PVRTVec3(0.f, 1.f, 0.f));

	/*
		Set uniforms that are constant throughout this training course
	*/
	// Set the sampler2D variable to the first texture unit
	glUseProgram(m_FastShader.uiId);
	glUniform1i(glGetUniformLocation(m_FastShader.uiId, "sTexture"), 0);

	// Define material properties
	glUseProgram(m_SlowShader.uiId);
	float afMaterial[4] =
	{
		0.4f,	// Diffuse intensity scale
		0.6f,	// Diffuse intensity bias
		0.82f,	// Specular intensity scale
		0.0f,	// Specular bias
	};
	glUniform4fv(glGetUniformLocation(m_SlowShader.uiId, "Material"), 1, afMaterial);

	// Set surface grain direction
	PVRTVec3 vMsGrainDir = PVRTVec3(2, 1, 0).normalized();
	glUniform3fv(glGetUniformLocation(m_SlowShader.uiId, "GrainDir"), 1, vMsGrainDir.ptr());

	/*
		Set OpenGL ES render states needed for this training course
	*/
	// Enable backface culling and depth test
	glCullFace(GL_BACK);
	glEnable(GL_CULL_FACE);

	// Enable z-buffer test
	// We are using a projection matrix optimized for a floating point depth buffer,
	// so the depth test and clear value need to be inverted (1 becomes near, 0 becomes far).
	glEnable(GL_DEPTH_TEST);
	glDepthFunc(GL_GEQUAL);
	glClearDepthf(0.0f);

	// Use a nice bright blue as clear colour
	glClearColor(0.6f, 0.8f, 1.0f, 1.0f);

	m_fAngleY = 0;
	m_eRenderMode = eTexLookup;

	return true;
}
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occured
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame.
				eglSwapBuffers() will be performed by PVRShell automatically.
				PVRShell will also manage important OS events.
				The user has access to these events through an abstraction
				layer provided by PVRShell.
******************************************************************************/
bool OGLES3IntroducingPOD::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	unsigned long ulTime = PVRShellGetTime();
	// Guard against the timer going backwards (e.g. wrap-around).
	if(m_ulTimePrev > ulTime)
		m_ulTimePrev = ulTime;
	unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
	m_ulTimePrev = ulTime;
	m_fFrame += (float)ulDeltaTime * g_fDemoFrameRate;
	// Restart the animation once the last frame has been passed.
	if (m_fFrame > m_Scene.nNumFrame - 1)
		m_fFrame = 0;

	// Sets the scene animation to this frame
	m_Scene.SetFrame(m_fFrame);

	/*
		Get the direction of the first light from the scene.
	*/
	PVRTVec4 vLightDirection;
	vLightDirection = m_Scene.GetLightDirection(0);
	// For direction vectors, w should be 0
	vLightDirection.w = 0.0f;

	/*
		Set up the view and projection matrices from the camera
	*/
	PVRTMat4 mView, mProjection;
	PVRTVec3 vFrom, vTo(0.0f), vUp(0.0f, 1.0f, 0.0f);
	float fFOV;

	// Setup the camera
	// Camera nodes are after the mesh and light nodes in the array
	int i32CamID = m_Scene.pNode[m_Scene.nNumMeshNode + m_Scene.nNumLight + g_ui32Camera].nIdx;

	// Get the camera position, target and field of view (fov)
	if(m_Scene.pCamera[i32CamID].nIdxTarget != -1) // Does the camera have a target?
		fFOV = m_Scene.GetCameraPos( vFrom, vTo, g_ui32Camera); // vTo is taken from the target node
	else
		fFOV = m_Scene.GetCamera( vFrom, vTo, vUp, g_ui32Camera); // vTo is calculated from the rotation

	// We can build the model view matrix from the camera position, target and an up vector.
	// For this we use PVRTMat4::LookAtRH()
	mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

	// Calculate the projection matrix
	bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);
	mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);

	/*
		A scene is composed of nodes. There are 3 types of nodes:
		- MeshNodes :
			references a mesh in the pMesh[].
			These nodes are at the beginning of the pNode[] array.
			And there are nNumMeshNode number of them.
			This way the .pod format can instantiate several times the same mesh
			with different attributes.
		- lights
		- cameras
		To draw a scene, you must go through all the MeshNodes and draw the referenced meshes.
	*/
	for (unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
	{
		SPODNode& Node = m_Scene.pNode[i];

		// Get the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(Node);

		// Pass the model-view-projection matrix (MVP) to the shader to transform the vertices
		PVRTMat4 mModelView, mMVP;
		mModelView = mView * mWorld;
		mMVP = mProjection * mModelView;
		glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);

		// Pass the light direction in model space to the shader
		// (world direction transformed by the inverse world matrix)
		PVRTVec4 vLightDir;
		vLightDir = mWorld.inverse() * vLightDirection;

		// Reinterpret the xyz of the vec4 as a vec3 and normalize it in place.
		PVRTVec3 vLightDirModel = *(PVRTVec3*)&vLightDir;
		vLightDirModel.normalize();

		glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);

		// Load the correct texture using our texture lookup table
		GLuint uiTex = 0;

		if(Node.nIdxMaterial != -1)
			uiTex = m_puiTextureIDs[Node.nIdxMaterial];

		glBindTexture(GL_TEXTURE_2D, uiTex);

		/*
			Now that the model-view matrix is set and the materials are ready,
			call another function to actually draw the mesh.
		*/
		DrawMesh(i);
	}

	// Display the demo name using the tools. For a detailed explanation, see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("IntroducingPOD", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
//========================================================================================================================================== bool OGLES2Water::GenerateNormalisationCubeMap(int uiTextureSize) { // variables float fOffset = 0.5f; float fHalfSize = uiTextureSize *0.5f; PVRTVec3 vTemp; unsigned char* pByte; unsigned char* pData = new unsigned char[uiTextureSize*uiTextureSize*3]; if(!pData) { PVRShellOutputDebug("Unable to allocate memory for texture data for cube map\n"); return false; } // Positive X pByte = pData; for(int j = 0; j < uiTextureSize; ++j) { for(int i = 0; i < uiTextureSize; ++i) { vTemp.x = fHalfSize; vTemp.y = -(j + fOffset - fHalfSize); vTemp.z = -(i + fOffset - fHalfSize); // normalize, pack 0 to 1 here, and normalize again vTemp = vTemp.normalize() *0.5 + 0.5; pByte[0] = (unsigned char)(vTemp.x * 255); pByte[1] = (unsigned char)(vTemp.y * 255); pByte[2] = (unsigned char)(vTemp.z * 255); pByte += 3; } } glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData); // Negative X pByte = pData; for(int j = 0; j < uiTextureSize; ++j) { for(int i = 0; i < uiTextureSize; ++i) { vTemp.x = -fHalfSize; vTemp.y = -(j + fOffset - fHalfSize); vTemp.z = (i + fOffset - fHalfSize); // normalize, pack 0 to 1 here, and normalize again vTemp = vTemp.normalize() *0.5 + 0.5; pByte[0] = (unsigned char)(vTemp.x * 255); pByte[1] = (unsigned char)(vTemp.y * 255); pByte[2] = (unsigned char)(vTemp.z * 255); pByte += 3; } } glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData); // Positive Y pByte = pData; for(int j = 0; j < uiTextureSize; ++j) { for(int i = 0; i < uiTextureSize; ++i) { vTemp.x = i + fOffset - fHalfSize; vTemp.y = fHalfSize; vTemp.z = j + fOffset - fHalfSize; // normalize, pack 0 to 1 here, and normalize again vTemp = vTemp.normalize() *0.5 + 0.5; pByte[0] = (unsigned char)(vTemp.x * 255); 
pByte[1] = (unsigned char)(vTemp.y * 255); pByte[2] = (unsigned char)(vTemp.z * 255); pByte += 3; } } glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData); // Negative Y pByte = pData; for(int j = 0; j < uiTextureSize; ++j) { for(int i = 0; i < uiTextureSize; ++i) { vTemp.x = i + fOffset - fHalfSize; vTemp.y = -fHalfSize; vTemp.z = -(j + fOffset - fHalfSize); // normalize, pack 0 to 1 here, and normalize again vTemp = vTemp.normalize() *0.5 + 0.5; pByte[0] = (unsigned char)(vTemp.x * 255); pByte[1] = (unsigned char)(vTemp.y * 255); pByte[2] = (unsigned char)(vTemp.z * 255); pByte += 3; } } glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData); // Positive Z pByte = pData; for(int j = 0; j < uiTextureSize; ++j) { for(int i = 0; i < uiTextureSize; ++i) { vTemp.x = i + fOffset - fHalfSize; vTemp.y = -(j + fOffset - fHalfSize); vTemp.z = fHalfSize; // normalize, pack 0 to 1 here, and normalize again vTemp = vTemp.normalize() *0.5 + 0.5; pByte[0] = (unsigned char)(vTemp.x * 255); pByte[1] = (unsigned char)(vTemp.y * 255); pByte[2] = (unsigned char)(vTemp.z * 255); pByte += 3; } } glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData); // Negative Z pByte = pData; for(int j = 0; j < uiTextureSize; ++j) { for(int i = 0; i < uiTextureSize; ++i) { vTemp.x = -(i + fOffset - fHalfSize); vTemp.y = -(j + fOffset - fHalfSize); vTemp.z = -fHalfSize; // normalize, pack 0 to 1 here, and normalize again vTemp = vTemp.normalize() *0.5 + 0.5; pByte[0] = (unsigned char)(vTemp.x * 255); pByte[1] = (unsigned char)(vTemp.y * 255); pByte[2] = (unsigned char)(vTemp.z * 255); pByte += 3; } } glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL_RGB /*GL_RGBA8*/, uiTextureSize, uiTextureSize, 0, GL_RGB, GL_UNSIGNED_BYTE, pData); delete[] pData; return true; 
}
/*!****************************************************************************
 @Function		ComputeVertexAndNormals
 @Input			function	Callback evaluating the surface: (u, v) -> (x, y, z)
 @Input			dMinU		Lower bound of the U parameter range (stored in fMinU)
 @Input			dMaxU		Upper bound of the U parameter range (stored in fMaxU)
 @Input			dMinV		Lower bound of the V parameter range (stored in fMinV)
 @Input			dMaxV		Upper bound of the V parameter range (stored in fMaxV)
 @Description	Samples the parametric surface on a regular nSampleU x nSampleV
				grid, builds per-vertex positions, UVs and normals, uploads
				them into iVertexVBO/iUvVBO/iNormalVBO and frees the CPU-side
				arrays again.
				NOTE(review): assumes nSampleU >= 2 and nSampleV >= 2 — the
				step divisions and the boundary-normal copies below would
				otherwise misbehave; confirm callers guarantee this.
******************************************************************************/
void ParametricSurface::ComputeVertexAndNormals(PFUNCTION function, float dMinU, float dMaxU, float dMinV, float dMaxV)
{
	int nVertex = nSampleU * nSampleV;

	// Temporary CPU-side buffers. Vertices are laid out row by row in V:
	// index (j*nSampleU + i) is grid sample (i, j).
	pVertex = new float[nVertex*3];
	pNormal = new float[nVertex*3];
	pUV = new float[nVertex*2];

	fMinU = dMinU; fMaxU = dMaxU; fMinV = dMinV; fMaxV = dMaxV;

	// Evaluate the surface at every grid sample
	for (int i=0; i<nSampleU; i++)
	{
		for (int j=0; j<nSampleV; j++)
		{
			float u = fMinU + i * (fMaxU-fMinU) / (float)(nSampleU-1);
			float v = fMinV + j * (fMaxV-fMinV) / (float)(nSampleV-1);
			float x,y,z;
			function(u,v, &x,&y,&z);
			pVertex[(j*nSampleU+i)*3 + 0] = x;
			pVertex[(j*nSampleU+i)*3 + 1] = y;
			pVertex[(j*nSampleU+i)*3 + 2] = z;
		}
	}

	// Texture coordinates simply span [0,1] x [0,1] across the grid
	for (int i=0; i<nSampleU; i++)
	{
		for (int j=0; j<nSampleV; j++)
		{
			pUV[ (j*nSampleU+i)*2 + 0 ] = (float)i / (float)(nSampleU-1);
			pUV[ (j*nSampleU+i)*2 + 1 ] = (float)j / (float)(nSampleV-1);
		}
	}

	// Per-vertex normal: negated, normalized cross product of the two grid
	// edges leaving the vertex (A->B steps one sample in U, A->C one in V).
	// Only computed for all but the last row and last column here; the
	// borders are filled in afterwards.
	for (int i=0; i<nSampleU-1; i++)
	{
		for (int j=0; j<nSampleV-1; j++)
		{
			PVRTVec3 ptA = PVRTVec3(pVertex[(j*nSampleU+i)*3+0],pVertex[(j*nSampleU+i)*3+1],pVertex[(j*nSampleU+i)*3+2]);
			PVRTVec3 ptB = PVRTVec3(pVertex[(j*nSampleU+i+1)*3+0],pVertex[(j*nSampleU+i+1)*3+1],pVertex[(j*nSampleU+i+1)*3+2]);
			PVRTVec3 ptC = PVRTVec3(pVertex[((j+1)*nSampleU+i)*3+0],pVertex[((j+1)*nSampleU+i)*3+1],pVertex[((j+1)*nSampleU+i)*3+2]);
			PVRTVec3 AB = PVRTVec3(ptB.x-ptA.x, ptB.y-ptA.y, ptB.z-ptA.z);
			PVRTVec3 AC = PVRTVec3(ptC.x-ptA.x, ptC.y-ptA.y, ptC.z-ptA.z);
			PVRTVec3 normal;
			normal = AB.cross(AC);
			normal.normalize();
			pNormal[(j*nSampleU+i)*3 + 0] = -normal.x;
			pNormal[(j*nSampleU+i)*3 + 1] = -normal.y;
			pNormal[(j*nSampleU+i)*3 + 2] = -normal.z;
		}
	}

	// Last row (j = nSampleV-1) reuses the normals of the first row —
	// presumably the surface wraps around in V; TODO confirm against callers.
	for (int i=0; i<nSampleU-1; i++)
	{
		pNormal[((nSampleV-1)*nSampleU+i)*3+0] = pNormal[(i)*3+0];
		pNormal[((nSampleV-1)*nSampleU+i)*3+1] = pNormal[(i)*3+1];
		pNormal[((nSampleV-1)*nSampleU+i)*3+2] = pNormal[(i)*3+2];
	}

	// Last column (i = nSampleU-1) reuses the normals of the first column
	// of the same row (wrap-around in U)
	for (int j=0; j<nSampleV-1; j++)
	{
		pNormal[(j*nSampleU+nSampleU-1)*3+0] = pNormal[(j*nSampleU)*3+0];
		pNormal[(j*nSampleU+nSampleU-1)*3+1] = pNormal[(j*nSampleU)*3+1];
		pNormal[(j*nSampleU+nSampleU-1)*3+2] = pNormal[(j*nSampleU)*3+2];
	}

	// The far corner takes the normal of its diagonal interior neighbour
	pNormal[((nSampleV-1)*nSampleU + (nSampleU-1))*3+0]= pNormal[((nSampleV-2)*nSampleU + (nSampleU-2))*3+0];
	pNormal[((nSampleV-1)*nSampleU + (nSampleU-1))*3+1]= pNormal[((nSampleV-2)*nSampleU + (nSampleU-2))*3+1];
	pNormal[((nSampleV-1)*nSampleU + (nSampleU-1))*3+2]= pNormal[((nSampleV-2)*nSampleU + (nSampleU-2))*3+2];

	// Insert generated data into vertex buffer objects.
	glBindBuffer(GL_ARRAY_BUFFER, iVertexVBO);
	glBufferData(GL_ARRAY_BUFFER, nVertex * 3 * sizeof (float), pVertex, GL_STATIC_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, iUvVBO);
	glBufferData(GL_ARRAY_BUFFER, nVertex * 2 * sizeof (float), pUV, GL_STATIC_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, iNormalVBO);
	glBufferData(GL_ARRAY_BUFFER, nVertex * 3 * sizeof (float), pNormal, GL_STATIC_DRAW);
	glBindBuffer(GL_ARRAY_BUFFER, 0); // Unbind the last buffer used.

	// The data now lives in the VBOs; release the CPU-side copies
	delete[] pVertex;
	delete[] pNormal;
	delete[] pUV;
}
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame. eglSwapBuffers() will be
				performed by PVRShell automatically. PVRShell will also manage
				important OS events. The user has access to these events
				through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES3DisplacementMap::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Calculate the time since the last frame (milliseconds),
	// used below to advance the displacement animation time-based
	unsigned long ulTime = PVRShellGetTime();
	unsigned long ulDeltaTime = ulTime - m_ulTimePrev;
	m_ulTimePrev = ulTime;

	// Use shader program
	glUseProgram(m_ShaderProgram.uiId);

	// Enable 2D texturing for the first texture.
	glActiveTexture(GL_TEXTURE0);

	// Set the sampler2D variable to the first texture unit
	glUniform1i(m_ShaderProgram.uiTexture, 0);

	// Enable 2D texturing for the second texture.
	glActiveTexture(GL_TEXTURE1);

	// Set the displacement map variable to the second texture unit
	glUniform1i(m_ShaderProgram.uiDisMap, 1);

	// Calculate and set the displacement factor: it oscillates between
	// 0 and 25, growing while m_bGrow is set and shrinking otherwise
	if(m_bGrow)
	{
		m_DisplacementFactor += (float)ulDeltaTime * g_fDemoFrameRate;

		if(m_DisplacementFactor > 25.0f)
		{
			m_bGrow = false;
			m_DisplacementFactor = 25.0f;
		}
	}
	else
	{
		m_DisplacementFactor -= (float)ulDeltaTime * g_fDemoFrameRate;

		if(m_DisplacementFactor < 0.0f)
		{
			m_bGrow = true;
			m_DisplacementFactor = 0.0f;
		}
	}

	glUniform1f(m_ShaderProgram.uiDisplacementFactor, m_DisplacementFactor);

	// Bind the displacement map texture (texture unit 1 is still active)
	glBindTexture(GL_TEXTURE_2D, m_uiDisMapID);

	// Now the displacement map texture is bound set the active texture to
	// texture 0 so the per-mesh diffuse textures bind to the right unit
	glActiveTexture(GL_TEXTURE0);

	// Draw the scene

	// Enable the vertex attribute arrays
	glEnableVertexAttribArray(VERTEX_ARRAY);
	glEnableVertexAttribArray(NORMAL_ARRAY);
	glEnableVertexAttribArray(TEXCOORD_ARRAY);

	for(unsigned int i = 0; i < m_Scene.nNumMeshNode; ++i)
	{
		SPODNode& Node = m_Scene.pNode[i];

		// Get the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(Node);

		// Pass the model-view-projection matrix (MVP) to the shader to
		// transform the vertices
		PVRTMat4 mModelView, mMVP;
		mModelView = m_View * mWorld;
		mMVP = m_Projection * mModelView;
		glUniformMatrix4fv(m_ShaderProgram.uiMVPMatrixLoc, 1, GL_FALSE, mMVP.f);

		// Pass the light direction in model space to the shader
		PVRTVec4 vLightDir;
		vLightDir = mWorld.inverse() * m_LightDir;

		// Reinterpret the xyz part of the PVRTVec4 as a PVRTVec3
		PVRTVec3 vLightDirModel = *(PVRTVec3*) vLightDir.ptr();
		vLightDirModel.normalize();

		glUniform3fv(m_ShaderProgram.uiLightDirLoc, 1, &vLightDirModel.x);

		// Load the correct texture for the mesh using our texture lookup table
		// (texture 0 is used when the node has no material)
		GLuint uiTex = 0;

		if(Node.nIdxMaterial != -1)
			uiTex = m_puiTextureIDs[Node.nIdxMaterial];

		glBindTexture(GL_TEXTURE_2D, uiTex);

		/*
			Now that the model-view matrix is set and the materials ready,
			call another function to actually draw the mesh.
		*/
		DrawMesh(i);
	}

	// Safely disable the vertex attribute arrays
	glDisableVertexAttribArray(VERTEX_ARRAY);
	glDisableVertexAttribArray(NORMAL_ARRAY);
	glDisableVertexAttribArray(TEXCOORD_ARRAY);

	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

	// Display the demo name using the tools. For a detailed explanation,
	// see the training course IntroducingPVRTools
	m_Print3D.DisplayDefaultTitle("DisplacementMapping", "", ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
/*!**************************************************************************** @Function UpdateFurShells @Description Update the fur shells. This is only called when the number of shells change. ******************************************************************************/ void OGLESFur::UpdateFurShells() { PVRTVec3 *pvSrcN, *pvSrcV; PVRTVec3 vTransNorm; PVRTVec4 vTransPos; SVertex *pvData; int i; unsigned int j; float fDepth, *pUV; int i32MeshIndex = m_Scene.pNode[eDuckBody].nIdx; SPODMesh* pMesh = &m_Scene.pMesh[i32MeshIndex]; PVRTMat4 mModel; PVRTMat3 mModel3; m_Scene.GetWorldMatrix(mModel, m_Scene.pNode[eDuckBody]); mModel3 = PVRTMat3(mModel); pvData = new SVertex[pMesh->nNumVertex]; if(!pvData) return; for(i = 0; i < m_i32FurShellNo; ++i) { fDepth = (c_fFurDepth * (float)(i+1) / (float)m_i32FurShellNo); for(j = 0; j < pMesh->nNumVertex; ++j) { pvSrcN = (PVRTVec3*) (pMesh->pInterleaved + (size_t) pMesh->sNormals.pData + (j * pMesh->sNormals.nStride)); pvSrcV = (PVRTVec3*) (pMesh->pInterleaved + (size_t) pMesh->sVertex.pData + (j * pMesh->sVertex.nStride)); pUV = (float*) (pMesh->pInterleaved + (size_t) pMesh->psUVW[0].pData + (j * pMesh->psUVW[0].nStride)); // Transform the vertex position so it is in world space PVRTVec4 vPos4 = PVRTVec4(*pvSrcV, 1.0f); PVRTTransform(&vTransPos, &vPos4, &mModel); // Transform the vertex normal so it is in world space vTransNorm.x = mModel.f[0] * pvSrcN->x + mModel.f[4] * pvSrcN->y + mModel.f[8] * pvSrcN->z; vTransNorm.y = mModel.f[1] * pvSrcN->x + mModel.f[5] * pvSrcN->y + mModel.f[9] * pvSrcN->z; vTransNorm.z = mModel.f[2] * pvSrcN->x + mModel.f[6] * pvSrcN->y + mModel.f[10]* pvSrcN->z; vTransNorm.normalize(); pvData[j].x = vTransPos.x + (vTransNorm.x * fDepth); pvData[j].y = vTransPos.y + (vTransNorm.y * fDepth); pvData[j].z = vTransPos.z + (vTransNorm.z * fDepth); pvData[j].nx = vTransNorm.x; pvData[j].ny = vTransNorm.y; pvData[j].nz = vTransNorm.z; pvData[j].tu = pUV[0]; pvData[j].tv = pUV[1]; } 
glBindBuffer(GL_ARRAY_BUFFER, m_uiShellVbo[i]); unsigned int uiSize = pMesh->nNumVertex * sizeof(SVertex); glBufferData(GL_ARRAY_BUFFER, uiSize, pvData, GL_STATIC_DRAW); glBindBuffer(GL_ARRAY_BUFFER, 0); } delete[] pvData; }
/*!****************************************************************************
 @Function		RenderScene
 @Return		bool		true if no error occurred
 @Description	Main rendering loop function of the program. The shell will
				call this function every frame. eglSwapBuffers() will be
				performed by PVRShell automatically. PVRShell will also manage
				important OS events. The user has access to these events
				through an abstraction layer provided by PVRShell.
******************************************************************************/
bool OGLES2ChameleonMan::RenderScene()
{
	// Clear the color and depth buffer
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Use shader program
	glUseProgram(m_SkinnedShaderProgram.uiId);

	// Toggle between DOT3 per-pixel lighting and vertex lighting on key press
	if(PVRShellIsKeyPressed(PVRShellKeyNameACTION1))
	{
		m_bEnableDOT3 = !m_bEnableDOT3;
		glUniform1i(m_SkinnedShaderProgram.auiLoc[ebUseDot3], m_bEnableDOT3);
	}

	/*
		Calculates the frame number to animate in a time-based manner.
		Uses the shell function PVRShellGetTime() to get the time in milliseconds.
	*/
	unsigned long iTime = PVRShellGetTime();

	if(iTime > m_iTimePrev)
	{
		float fDelta = (float) (iTime - m_iTimePrev);
		m_fFrame += fDelta * g_fDemoFrameRate;

		// Increment the counters to make sure our animation works
		m_fLightPos += fDelta * 0.0034f;
		m_fWallPos += fDelta * 0.00027f;
		m_fBackgroundPos += fDelta * -0.000027f;

		// Wrap the Animation back to the Start
		if(m_fLightPos >= PVRT_TWO_PI)
			m_fLightPos -= PVRT_TWO_PI;

		if(m_fWallPos >= PVRT_TWO_PI)
			m_fWallPos -= PVRT_TWO_PI;

		if(m_fBackgroundPos <= 0)
			m_fBackgroundPos += 1.0f;

		if(m_fFrame > m_Scene.nNumFrame - 1)
			m_fFrame = 0;
	}

	m_iTimePrev = iTime;

	// Set the scene animation to the current frame
	m_Scene.SetFrame(m_fFrame);

	// Set up camera
	PVRTVec3 vFrom, vTo, vUp(0.0f, 1.0f, 0.0f);
	PVRTMat4 mView, mProjection;
	PVRTVec3 LightPos;
	float fFOV;
	int i;

	bool bRotate = PVRShellGet(prefIsRotated) && PVRShellGet(prefFullScreen);

	// Get the camera position, target and field of view (fov)
	if(m_Scene.pCamera[0].nIdxTarget != -1) // Does the camera have a target?
		fFOV = m_Scene.GetCameraPos( vFrom, vTo, 0); // vTo is taken from the target node
	else
		fFOV = m_Scene.GetCamera( vFrom, vTo, vUp, 0); // vTo is calculated from the rotation

	// Correct the fov for the aspect ratio (swapped when the screen is rotated)
	fFOV *= bRotate ? (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight) : (float)PVRShellGet(prefHeight)/(float)PVRShellGet(prefWidth);

	/*
		We can build the model view matrix from the camera position, target
		and an up vector. For this we use PVRTMat4::LookAtRH().
	*/
	mView = PVRTMat4::LookAtRH(vFrom, vTo, vUp);

	// Calculate the projection matrix
	mProjection = PVRTMat4::PerspectiveFovRH(fFOV, (float)PVRShellGet(prefWidth)/(float)PVRShellGet(prefHeight), g_fCameraNear, g_fCameraFar, PVRTMat4::OGL, bRotate);

	// Update Light Position and related VGP Program constant
	LightPos.x = 200.0f;
	LightPos.y = 350.0f;
	LightPos.z = 200.0f * PVRTABS(sin((PVRT_PI / 4.0f) + m_fLightPos));
	glUniform3fv(m_SkinnedShaderProgram.auiLoc[eLightPos], 1, LightPos.ptr());

	// Set up the View * Projection Matrix
	PVRTMat4 mViewProjection;
	mViewProjection = mProjection * mView;
	glUniformMatrix4fv(m_SkinnedShaderProgram.auiLoc[eViewProj], 1, GL_FALSE, mViewProjection.ptr());

	// Enable the vertex attribute arrays
	for(i = 0; i < eNumAttribs; ++i) glEnableVertexAttribArray(i);

	// Draw skinned meshes (the first three nodes: body, legs, belt)
	for(unsigned int i32NodeIndex = 0; i32NodeIndex < 3; ++i32NodeIndex)
	{
		// Bind correct texture (normal map on unit 1, diffuse on unit 0)
		switch(i32NodeIndex)
		{
			case eBody:
				glActiveTexture(GL_TEXTURE1);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexHeadNormalMap);
				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexHeadBody);
				break;
			case eLegs:
				glActiveTexture(GL_TEXTURE1);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexLegsNormalMap);
				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexLegs);
				break;
			default:
				glActiveTexture(GL_TEXTURE1);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexBeltNormalMap);
				glActiveTexture(GL_TEXTURE0);
				glBindTexture(GL_TEXTURE_2D, m_ui32TexBelt);
				break;
		}

		DrawSkinnedMesh(i32NodeIndex);
	}

	// Safely disable the vertex attribute arrays
	for(i = 0; i < eNumAttribs; ++i) glDisableVertexAttribArray(i);

	// Draw non-skinned meshes with the default (non-skinning) shader
	glUseProgram(m_DefaultShaderProgram.uiId);

	// Enable the vertex attribute arrays
	for(i = 0; i < eNumDefaultAttribs; ++i) glEnableVertexAttribArray(i);

	for(unsigned int i32NodeIndex = 3; i32NodeIndex < m_Scene.nNumMeshNode; ++i32NodeIndex)
	{
		SPODNode& Node = m_Scene.pNode[i32NodeIndex];
		SPODMesh& Mesh = m_Scene.pMesh[Node.nIdx];

		// bind the VBO for the mesh
		glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[Node.nIdx]);

		// bind the index buffer, won't hurt if the handle is 0
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[Node.nIdx]);

		// Get the node model matrix
		PVRTMat4 mWorld;
		mWorld = m_Scene.GetWorldMatrix(Node);

		// Setup the appropriate texture and transformation (if needed)
		switch(i32NodeIndex)
		{
			case eWall:
				glBindTexture(GL_TEXTURE_2D, m_ui32TexWall);

				// Rotate the wall mesh which is circular
				mWorld *= PVRTMat4::RotationY(m_fWallPos);
				glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], 0);
				break;
			case eBackground:
				glBindTexture(GL_TEXTURE_2D, m_ui32TexSkyLine);

				// The background scrolls via a U texture-coordinate offset
				glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], m_fBackgroundPos);
				break;
			case eLights:
				{
					glBindTexture(GL_TEXTURE_2D, m_ui32TexLamp);

					// Rotate the lights around the wall's local Y axis so
					// they follow the rotating wall
					PVRTMat4 mWallWorld = m_Scene.GetWorldMatrix(m_Scene.pNode[eWall]);
					mWorld = mWallWorld * PVRTMat4::RotationY(m_fWallPos) * mWallWorld.inverse() * mWorld;
					glUniform1f(m_DefaultShaderProgram.auiLoc[eDefaultUOffset], 0);
				}
				break;
			default:
				break;
		};

		// Set up shader uniforms
		PVRTMat4 mModelViewProj;
		mModelViewProj = mViewProjection * mWorld;
		glUniformMatrix4fv(m_DefaultShaderProgram.auiLoc[eDefaultMVPMatrix], 1, GL_FALSE, mModelViewProj.ptr());

		// Set the vertex attribute offsets
		glVertexAttribPointer(DEFAULT_VERTEX_ARRAY, 3, GL_FLOAT, GL_FALSE, Mesh.sVertex.nStride, Mesh.sVertex.pData);
		glVertexAttribPointer(DEFAULT_TEXCOORD_ARRAY, 2, GL_FLOAT, GL_FALSE, Mesh.psUVW[0].nStride, Mesh.psUVW[0].pData);

		// Indexed Triangle list
		glDrawElements(GL_TRIANGLES, Mesh.nNumFaces*3, GL_UNSIGNED_SHORT, 0);
	}

	// Safely disable the vertex attribute arrays
	// (eNumAttribs >= eNumDefaultAttribs; disabling an already-disabled
	// array is harmless)
	for(i = 0; i < eNumAttribs; ++i) glDisableVertexAttribArray(i);

	// unbind the VBOs
	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

	// Display the demo name using the tools. For a detailed explanation,
	// see the training course IntroducingPVRTools
	const char * pDescription;

	if(m_bEnableDOT3)
		pDescription = "Skinning with DOT3 Per Pixel Lighting";
	else
		pDescription = "Skinning with Vertex Lighting";

	m_Print3D.DisplayDefaultTitle("Chameleon Man", pDescription, ePVRTPrint3DSDKLogo);
	m_Print3D.Flush();

	return true;
}
/*!****************************************************************************
 @Function		RenderSceneWithEffect
 @Input			uiEffectId	Index into m_ppPFXEffects of the effect to use
 @Input			mProjection	Projection matrix for this pass
 @Input			mView		View matrix for this pass
 @Return		bool		true if no error occurred
 @Description	Renders the whole scene with a single effect. For each mesh
				node, binds its diffuse texture and VBOs, then walks the
				effect's uniform list and feeds every semantic it declares
				(attribute pointers, matrices, material/light data) before
				drawing. Returns false on an unhandled semantic.
******************************************************************************/
bool OGLES3ShadowMapping::RenderSceneWithEffect(const int uiEffectId, const PVRTMat4 &mProjection, const PVRTMat4 &mView)
{
	CPVRTPFXEffect *pEffect = m_ppPFXEffects[uiEffectId];

	// Activate the passed effect
	pEffect->Activate();

	for (unsigned int i=0; i < m_Scene.nNumMeshNode; i++)
	{
		SPODNode* pNode = &m_Scene.pNode[i];
		SPODMesh* pMesh = &m_Scene.pMesh[pNode->nIdx];
		SPODMaterial *pMaterial = 0;

		if (pNode->nIdxMaterial != -1)
		{
			pMaterial = &m_Scene.pMaterial[pNode->nIdxMaterial];

			// Bind the texture if there is one bound to this object.
			// The cache is keyed on the texture name with the ".png"
			// extension stripped.
			if (pMaterial->nIdxTexDiffuse != -1)
			{
				CPVRTString texname = CPVRTString(m_Scene.pTexture[pMaterial->nIdxTexDiffuse].pszName).substitute(".png", "");
				CPVRTStringHash hashedName(texname);
				if (m_TextureCache.Exists(hashedName))
					glBindTexture(GL_TEXTURE_2D, m_TextureCache[hashedName]);
			}
		}

		glBindBuffer(GL_ARRAY_BUFFER, m_puiVbo[i]);
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_puiIndexVbo[i]);

		// Pre-calculate commonly used matrices
		PVRTMat4 mWorld;
		m_Scene.GetWorldMatrix(mWorld, *pNode);
		PVRTMat4 mWorldView = mView * mWorld;

		// Bind semantics: supply a value for every uniform/attribute the
		// effect declares
		const CPVRTArray<SPVRTPFXUniform>& Uniforms = pEffect->GetUniformArray();
		for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sVertex.nStride, pMesh->sVertex.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsNORMAL:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 3, GL_FLOAT, GL_FALSE, pMesh->sNormals.nStride, pMesh->sNormals.pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsUV:
				{
					glVertexAttribPointer(Uniforms[j].nLocation, 2, GL_FLOAT, GL_FALSE, pMesh->psUVW[0].nStride, pMesh->psUVW[0].pData);
					glEnableVertexAttribArray(Uniforms[j].nLocation);
				}
				break;
			case ePVRTPFX_UsMATERIALCOLORDIFFUSE:
				{
					// Only set if this node actually has a material
					if (pMaterial)
						glUniform4f(Uniforms[j].nLocation, pMaterial->pfMatDiffuse[0], pMaterial->pfMatDiffuse[1], pMaterial->pfMatDiffuse[2], 1.0f);
				}
				break;
			case ePVRTPFX_UsWORLDVIEWPROJECTION:
				{
					PVRTMat4 mWorldViewProj = mProjection * mWorldView;
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldViewProj.f);
				}
				break;
			case ePVRTPFX_UsWORLDI:
				{
					PVRTMat3 mWorldI3x3(mWorld.inverse());
					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldI3x3.f);
				}
				break;
			case ePVRTPFX_UsWORLDVIEWIT:
				{
					// Inverse-transpose for transforming normals
					PVRTMat3 mWorldViewIT3x3(mWorldView.inverse().transpose());
					glUniformMatrix3fv(Uniforms[j].nLocation, 1, GL_FALSE, mWorldViewIT3x3.f);
				}
				break;
			case ePVRTPFX_UsTEXTURE:
				{
					// Set the sampler variable to the texture unit
					glUniform1i(Uniforms[j].nLocation, Uniforms[j].nIdx);
				}
				break;
			case ePVRTPFX_UsLIGHTPOSWORLD:
				{
					glUniform3fv(Uniforms[j].nLocation, 1, m_vLightPosition.ptr());
				}
				break;
			case eCUSTOMSEMANTIC_SHADOWTRANSMATRIX:
				{
					// We need to calculate the texture projection matrix. This matrix takes the pixels from world space to previously rendered light projection space
					// where we can look up values from our saved depth buffer. The matrix is constructed from the light view and projection matrices as used for the previous render and
					// then multiplied by the inverse of the current view matrix.
					//PVRTMat4 mTextureMatrix = m_mBiasMatrix * m_mLightProjection * m_mLightView * mView.inverse();
					PVRTMat4 mTextureMatrix = m_mBiasMatrix * m_mLightProjection * m_mLightView * mWorld;
					glUniformMatrix4fv(Uniforms[j].nLocation, 1, GL_FALSE, mTextureMatrix.f);
				}
				break;
			case ePVRTPFX_UsRANDOM:
				{
					// NOTE(review): the RANDOM semantic is repurposed here to
					// carry the shadow-map depth bias — confirm against the
					// PFX file.
					glUniform1f(Uniforms[j].nLocation, m_fBias);
				}
				break;
			default:
				{
					PVRShellOutputDebug("Error: Unhandled semantic in RenderSceneWithEffect()\n");
					return false;
				}
			}
		}

		// Now that all uniforms are set and the materials ready, draw the mesh.
		glDrawElements(GL_TRIANGLES, pMesh->nNumFaces*3, GL_UNSIGNED_SHORT, 0);

		// Disable all vertex attributes
		for(unsigned int j = 0; j < Uniforms.GetSize(); ++j)
		{
			switch(Uniforms[j].nSemantic)
			{
			case ePVRTPFX_UsPOSITION:
			case ePVRTPFX_UsNORMAL:
			case ePVRTPFX_UsUV:
				glDisableVertexAttribArray(Uniforms[j].nLocation);
				break;
			}
		}
	}

	glBindBuffer(GL_ARRAY_BUFFER, 0);
	glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

	return true;
}