// Builds a world-space picking ray for the given camera from a point in
// normalized window coordinates (nwx, nwy in [0, 1], presumably with the
// window origin matching Horde3D's convention — confirm against callers).
// Outputs: (*ox,*oy,*oz) = ray origin, (*dx,*dy,*dz) = (unnormalized) ray
// direction from the near plane to the far plane.
void pickRay( H3DNode cameraNode, float nwx, float nwy,
              float *ox, float *oy, float *oz,
              float *dx, float *dy, float *dz )
{
    // Transform from normalized window [0, 1]
    // to normalized device coordinates [-1, 1]
    float cx( 2.0f * nwx - 1.0f );
    float cy( 2.0f * nwy - 1.0f );

    // Get projection matrix
    Matrix4f projMat;
    h3dGetCameraProjMat( cameraNode, projMat.x );

    // Get camera view matrix (inverse of the camera's world transform)
    const float *camTrans;
    h3dGetNodeTransMats( cameraNode, 0x0, &camTrans );
    Matrix4f viewMat( camTrans );
    viewMat = viewMat.inverted();

    // Create inverse view-projection matrix for unprojection
    Matrix4f invViewProjMat = (projMat * viewMat).inverted();

    // Unproject two points on the ray: p0 on the near plane (NDC z = -1)
    // and p1 on the far plane (NDC z = +1), then do the perspective divide.
    Vec4f p0 = invViewProjMat * Vec4f( cx, cy, -1, 1 );
    Vec4f p1 = invViewProjMat * Vec4f( cx, cy, 1, 1 );

    p0.x /= p0.w;
    p0.y /= p0.w;
    p0.z /= p0.w;
    p1.x /= p1.w;
    p1.y /= p1.w;
    p1.z /= p1.w;

    if( h3dGetNodeParamI( cameraNode, H3DCamera::OrthoI ) == 1 )
    {
        // Orthographic camera: the ray origin is NOT the camera position;
        // it is offset within the view plane by the click position scaled
        // to the frustum extents.
        float frustumWidth = h3dGetNodeParamF( cameraNode, H3DCamera::RightPlaneF, 0 ) -
                             h3dGetNodeParamF( cameraNode, H3DCamera::LeftPlaneF, 0 );
        float frustumHeight = h3dGetNodeParamF( cameraNode, H3DCamera::TopPlaneF, 0 ) -
                              h3dGetNodeParamF( cameraNode, H3DCamera::BottomPlaneF, 0 );

        // Offset from the camera center in camera-local axes.
        Vec4f p2( cx, cy, 0, 1 );
        p2.x = cx * frustumWidth * 0.5f;
        p2.y = cy * frustumHeight * 0.5f;

        // Strip the translation from the view matrix, so inverting it yields
        // only the camera's rotation; this rotates the local offset into
        // world space. NOTE: viewMat is already inverted above, so this
        // mutates the view matrix, not the camera transform.
        viewMat.x[12] = 0;
        viewMat.x[13] = 0;
        viewMat.x[14] = 0;
        p2 = viewMat.inverted() * p2;

        // Ray origin = camera world position + rotated in-plane offset.
        *ox = camTrans[12] + p2.x;
        *oy = camTrans[13] + p2.y;
        *oz = camTrans[14] + p2.z;
    }
    else
    {
        // Perspective camera: all rays originate at the camera position
        // (translation column of the camera's world transform).
        *ox = camTrans[12];
        *oy = camTrans[13];
        *oz = camTrans[14];
    }

    // Direction = far-plane point minus near-plane point (not normalized).
    *dx = p1.x - p0.x;
    *dy = p1.y - p0.y;
    *dz = p1.z - p0.z;
}
// Legacy Viewport 1.0 draw entry point. Consumes the _BatchDrawUserData
// attached to the draw request, optionally draws a wireframe bounding box,
// flushes any queued shape batches, and then destroys the user data.
void UsdMayaGLBatchRenderer::Draw(
        const MDrawRequest& request,
        M3dView &view )
{
    // VP 1.0 Implementation
    //
    MDrawData drawData = request.drawData();

    // The user data was stashed on the draw data's geometry pointer by the
    // queuing code; nothing to do if it was never set.
    _BatchDrawUserData* batchData = static_cast<_BatchDrawUserData*>(drawData.geometry());
    if( !batchData )
        return;

    MMatrix projectionMat;
    view.projectionMatrix(projectionMat);

    MMatrix modelViewMat;
    view.modelViewMatrix(modelViewMat);

    if( batchData->_bounds )
    {
        // Wireframe bounds are drawn immediately, outside the batch queue.
        px_vp20Utils::RenderBoundingBox(*(batchData->_bounds),
                                        *(batchData->_wireframeColor),
                                        modelViewMat,
                                        projectionMat);
    }

    if( batchData->_drawShape && !_renderQueue.empty() )
    {
        // Factor the object's own matrix out of the model-view to recover
        // the pure view matrix for the batch renderer.
        MMatrix viewMat( request.matrix().inverse() * modelViewMat );

        unsigned int viewX, viewY, viewWidth, viewHeight;
        view.viewport(viewX, viewY, viewWidth, viewHeight);
        GfVec4d viewport(viewX, viewY, viewWidth, viewHeight);

        // Only the first call to this will do anything... After that the batch
        // queue is cleared.
        //
        _RenderBatches( NULL, viewMat, projectionMat, viewport );
    }

    // Clean up the _BatchDrawUserData!
    // NOTE(review): this function takes ownership of the raw pointer stashed
    // in the draw data — confirm no other consumer reads it after Draw().
    delete batchData;
}
//----------------------------------------------------------------------- Real Node::getSquaredViewDepth(const Camera* cam) const { Vector3 diff = _getDerivedPosition() - cam->getDerivedPosition(); #if 1 Matrix4 viewObectMat; if (cam) { Matrix4 viewMat(cam->getViewMatrix(false)); Matrix4 objMat(_getFullTransform()); viewObectMat =viewMat * objMat ; } viewObectMat.getTrans(diff); diff.x =0; diff.z =0; #endif // NB use squared length rather than real depth to avoid square root return diff.squaredLength(); }
void Vessel::DrawWorld() { if (myPlayer) { camera.position = myPlayer->pos; glm::mat4 viewMat(1.0f); camera.GenerateView(viewMat); glUseProgram(floorProgram.program); glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, floorProgram.texture); glUniformMatrix4fv(floorProgram.viewMat, 1, false, glm::value_ptr(viewMat)); glUniform1i(floorProgram.textureLoc, 0); DrawFloor(); glUseProgram(coloredVertexProgram.program); glm::vec4 color(0.5f, 0.5f, 0.5f, 1.0f); glUniform4fv(coloredVertexProgram.color, 1, glm::value_ptr(color)); glUniformMatrix4fv(coloredVertexProgram.viewMat, 1, false, glm::value_ptr(viewMat)); DrawWalls(); glDisable(GL_CULL_FACE); glEnable(GL_BLEND); glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); glUseProgram(shadowProgram.program); glUniform2fv(shadowProgram.playerPos, 1, glm::value_ptr(camera.position)); glUniformMatrix4fv(shadowProgram.viewMat, 1, false, glm::value_ptr(viewMat)); DrawShadows(); glEnable(GL_CULL_FACE); glDisable(GL_BLEND); } }