/** Draw the label's text and (optionally) its icon. Both are only drawn
  * during the translucent render pass, and the icon is billboarded at a
  * fixed screen size.
  */
void LabelGeometry::render(RenderContext& rc, double /* clock */) const
{
    bool iconVisible = !m_icon.isNull();

    // Shift the text to the right of the icon so the two don't overlap.
    Vector3f textOffset = Vector3f::Zero();
    if (iconVisible)
    {
        textOffset.x() = std::floor(m_iconSize / 2.0f) + 1.0f;
    }

    // Start just below fully opaque, then apply distance-based fading
    // when a fade range has been set.
    float labelOpacity = 0.99f;
    if (m_fadeRange.isValid())
    {
        float eyeDistance = rc.modelview().translation().norm();
        float projectedSize = m_fadeSize / (rc.pixelSize() * eyeDistance);
        labelOpacity *= m_fadeRange->opacity(projectedSize);
    }

    // Fully faded out: nothing to draw.
    if (labelOpacity == 0.0f)
    {
        return;
    }

    // Labels are never fully opaque, so they only belong in the translucent pass.
    if (rc.pass() != RenderContext::TranslucentPass)
    {
        return;
    }

    // Scaling by the eye distance keeps the icon's screen size constant.
    float distanceScale = rc.modelview().translation().norm();

    // Draw the label string as long as it's not empty.
    if (!m_text.empty())
    {
        rc.drawText(textOffset, m_text, m_font.ptr(), m_color, labelOpacity);
    }

    if (iconVisible)
    {
        Material iconMaterial;
        iconMaterial.setEmission(m_iconColor);
        iconMaterial.setOpacity(labelOpacity);
        iconMaterial.setBaseTexture(m_icon.ptr());
        rc.bindMaterial(&iconMaterial);
        rc.drawBillboard(Vector3f::Zero(), m_iconSize * rc.pixelSize() * distanceScale);
    }
}
/** Render the sensor frustum.
  *
  * The frustum is drawn in the source body's local frame as a fan of rays
  * from the sensor apex. Each ray is clipped against the target body's
  * ellipsoid so that the frustum, its footprint outline, and the optional
  * grid all terminate on the target's surface (or at m_range, whichever
  * is nearer).
  *
  * \param rc render context supplying the modelview stack and material binding
  * \param currentTime time (used to evaluate source/target state vectors)
  */
void SensorFrustumGeometry::render(RenderContext& rc, double currentTime) const
{
    Material material;
    material.setDiffuse(m_color);
    material.setOpacity(m_opacity);

    rc.setVertexInfo(VertexSpec::Position);
    rc.bindMaterial(&material);

    // Nothing can be drawn without both endpoints of the sensor geometry.
    if (source() && target())
    {
        // Vector from source to target in the inertial frame.
        Vector3d p = target()->position(currentTime) - source()->position(currentTime);

        // Get the position of the source in the local coordinate system of the target
        Matrix3d targetRotation = target()->orientation(currentTime).conjugate().toRotationMatrix();
        Vector3d p2 = targetRotation * -p;

        // Special handling for ellipsoidal objects, i.e. planets.
        // TODO: Should have a cleaner solution here than a dynamic_cast
        // NOTE(review): ellipsoidalTarget is set but never read below; the
        // semi-axes default of (1,1,1) is what actually drives the non-planet case.
        bool ellipsoidalTarget = false;
        Vector3d targetSemiAxes = Vector3d::Ones();
        if (dynamic_cast<WorldGeometry*>(target()->geometry()))
        {
            ellipsoidalTarget = true;
            // ellipsoidAxes() returns full axis lengths; halve to get semi-axes.
            targetSemiAxes = dynamic_cast<WorldGeometry*>(target()->geometry())->ellipsoidAxes().cast<double>() / 2.0;
        }

        Quaterniond rotation = source()->orientation(currentTime);
        // Combined rotation: sensor mounting orientation followed by the
        // source body's orientation.
        Matrix3d m = (rotation * m_orientation).toRotationMatrix();

        // Half-angle tangents give the frustum extents at unit depth.
        double horizontalSize = tan(m_frustumHorizontalAngle / 2.0);
        double verticalSize = tan(m_frustumVerticalAngle / 2.0);

        // Currently always false; kept as a hook for two-sided rendering.
        bool showInside = false;

        rc.pushModelView();
        rc.rotateModelView(rotation.cast<float>().conjugate());

        m_frustumPoints.clear();

        // The frustum rim is sampled at 'sections' points: for the
        // rectangular shape, 12 subdivisions per side of the rectangle.
        const unsigned int sideDivisions = 12;
        const unsigned int sections = 4 * sideDivisions;
        for (unsigned int i = 0; i < sections; ++i)
        {
            // r is a unit direction along the frustum boundary.
            Vector3d r;
            if (frustumShape() == Elliptical)
            {
                double t = (double) i / (double) sections;
                double theta = 2 * PI * t;
                r = Vector3d(horizontalSize * cos(theta), verticalSize * sin(theta), 1.0).normalized();
            }
            else
            {
                // Rectangular shape: walk the four edges of the rectangle
                // (bottom, right, top, left), 'sideDivisions' samples each.
                if (i < sideDivisions)
                {
                    double t = i / double(sideDivisions);
                    r = Vector3d((t - 0.5) * horizontalSize, -verticalSize * 0.5, 1.0).normalized();
                }
                else if (i < sideDivisions * 2)
                {
                    double t = (i - sideDivisions) / double(sideDivisions);
                    r = Vector3d(horizontalSize * 0.5, (t - 0.5) * verticalSize, 1.0).normalized();
                }
                else if (i < sideDivisions * 3)
                {
                    double t = (i - sideDivisions * 2) / double(sideDivisions);
                    r = Vector3d((0.5 - t) * horizontalSize, verticalSize * 0.5, 1.0).normalized();
                }
                else
                {
                    double t = (i - sideDivisions * 3) / double(sideDivisions);
                    r = Vector3d(-horizontalSize * 0.5, (0.5 - t) * verticalSize, 1.0).normalized();
                }
            }

            // Rotate the boundary direction into the source body's frame.
            r = m * r;

            // Clip the ray against the target ellipsoid; when there is no
            // intersection, the ray extends to the sensor's full range.
            double intersectDistance = m_range;
            if (TestRayEllipsoidIntersection(p2, targetRotation * r, targetSemiAxes, &intersectDistance))
            {
                // Reduce the intersect distance slightly to reduce depth precision problems
                // when drawing the sensor footprint on a planet surface.
                intersectDistance *= 0.9999;
            }
            m_frustumPoints.push_back(r * min(m_range, intersectDistance));
        }

        if (m_opacity > 0.0f)
        {
            // Draw the frustum
            material.setOpacity(m_opacity);
            rc.bindMaterial(&material);

            if (showInside)
            {
                glDisable(GL_CULL_FACE);
            }

            // Triangle fan: apex at the origin, rim points emitted in
            // reverse order, then the last rim point repeated to close the fan.
            glBegin(GL_TRIANGLE_FAN);
            glVertex3d(0.0, 0.0, 0.0);
            for (int i = (int) m_frustumPoints.size() - 1; i >= 0; --i)
            {
                glVertex3dv(m_frustumPoints[i].data());
            }
            glVertex3dv(m_frustumPoints.back().data());
            glEnd();

            glEnable(GL_CULL_FACE);
        }

        if (m_footprintOpacity > 0.0f)
        {
            // Draw the footprint outline as a closed line strip around the rim.
            material.setOpacity(1.0f);
            rc.bindMaterial(&material);

            glBegin(GL_LINE_STRIP);
            for (unsigned int i = 0; i < m_frustumPoints.size(); ++i)
            {
                glVertex3dv(m_frustumPoints[i].data());
            }
            // Repeat the first point to close the loop.
            glVertex3dv(m_frustumPoints[0].data());
            glEnd();
        }

        if (m_gridOpacity > 0.0f)
        {
            // Draw grid lines
            unsigned int ringCount = 8;
            unsigned int rayCount = frustumShape() == Rectangular ? 4 : 8;

            material.setOpacity(m_gridOpacity);
            rc.bindMaterial(&material);

            // Concentric "rings": scaled-down copies of the rim polygon.
            for (unsigned int i = 1; i < ringCount; ++i)
            {
                double t = (double) i / (double) ringCount;
                glBegin(GL_LINE_LOOP);
                for (unsigned int j = 0; j < m_frustumPoints.size(); ++j)
                {
                    Vector3d v = m_frustumPoints[j] * t;
                    glVertex3dv(v.data());
                }
                glEnd();
            }

            // Radial rays from the apex to evenly spaced rim points.
            unsigned int rayStep = sections / rayCount;
            glBegin(GL_LINES);
            for (unsigned int i = 0; i < sections; i += rayStep)
            {
                glVertex3d(0.0, 0.0, 0.0);
                glVertex3dv(m_frustumPoints[i].data());
            }
            glEnd();
        }

        rc.popModelView();
    }
}
/** Draw the latitude/longitude grid lines that cross the given quadtree tile.
  *
  * Grid spacing adapts to the planet's projected size so that lines stay
  * roughly 30 pixels apart, and the whole grid fades out as the spacing
  * grows past 45 degrees. Meridian spacing is widened near the poles,
  * where meridians converge.
  *
  * \param rc render context (modelview, pixel size, material binding)
  * \param world the planet geometry (used for its ellipsoid dimensions)
  * \param tile the quadtree tile whose extent bounds the lines drawn
  */
void PlanetGridLayer::renderTile(RenderContext& rc, const WorldGeometry* world, const QuadtreeTile* tile) const
{
    // Approximate the (possibly ellipsoidal) planet by a sphere using the
    // smallest semi-axis. ellipsoidAxes() returns full axis lengths.
    float radius = world->ellipsoidAxes().minCoeff() * 0.5f;

    // Eye position in the planet's local frame.
    Vector3f cameraPosition = (rc.modelview().inverse() * Vector4f::UnitW()).start<3>();

    float distToCenter = rc.modelview().translation().norm();
    float altitude = max(1.0f, distToCenter - radius);
    float apparentSize = radius / altitude;

    // Compute the approximate projected size of the planet in pixels
    float pixelSize = apparentSize / rc.pixelSize();

    // Aim for roughly one parallel every 30 pixels.
    float idealLatSpacing = 360.0f / (pixelSize / 30.0f);

    // Fade out grid when grid spacing gets large
    float opacity = m_gridOpacity;
    if (idealLatSpacing > 45.0f)
    {
        opacity *= max(0.0f, (90.0f - idealLatSpacing) / 45.0f);
    }

    if (opacity == 0.0f)
    {
        return;
    }

    rc.setVertexInfo(VertexSpec::Position);

    Material simpleMaterial;
    simpleMaterial.setDiffuse(m_gridColor);
    simpleMaterial.setOpacity(opacity);
    rc.bindMaterial(&simpleMaterial);

    // Reduce meridian spacing when the observer is close to the pole
    // (meridians converge there, so fewer are needed to keep the same
    // on-screen density).
    float z = abs(cameraPosition.normalized().z());
    float idealLonSpacing = idealLatSpacing / max(0.01f, sqrt(1.0f - z * z));

    // TODO: Reduce spacing further when observer is close to the surface
    // and looking out along the horizon.

    // Snap the ideal spacings to "nice" values (degrees -> radians).
    float latSpacing = float(toRadians(chooseGridSpacing(idealLatSpacing)));
    float lonSpacing = float(toRadians(chooseGridSpacing(idealLonSpacing)));

    // Tile bounds in radians. extent() and southwest() are in units of
    // pi radians.
    float tileArc = float(PI) * tile->extent();
    Vector2f southwest = tile->southwest();

    float lonWest = float(PI) * southwest.x();
    float lonEast = lonWest + tileArc;
    float latSouth = float(PI) * southwest.y();
    float latNorth = latSouth + tileArc;

    // Indices of the grid lines that fall inside this tile.
    int firstMeridian = int(ceil(lonWest / lonSpacing));
    int lastMeridian = int(floor(lonEast / lonSpacing));
    int firstParallel = int(ceil(latSouth / latSpacing));
    int lastParallel = int(floor(latNorth / latSpacing));

    for (int i = firstMeridian; i <= lastMeridian; ++i)
    {
        drawMeridian(i * lonSpacing, latSouth, latNorth);
    }

    for (int i = firstParallel; i <= lastParallel; ++i)
    {
        drawParallel(i * latSpacing, lonWest, lonEast);
    }

    // TODO: Draw coordinate labels. (Dead sub-observer lon/lat computations
    // that were left here for that feature have been removed; recompute them
    // from cameraPosition when labels are implemented.)
}
/** Render the trajectory as a line strip built from the stored samples.
  *
  * The visible time window [t0, t1] is either the full sample range or a
  * sliding window ending at the current time. Endpoints are interpolated
  * so the plot begins and ends exactly at the window bounds, and per-vertex
  * alpha implements the "age" fade along the window.
  *
  * \param rc render context (render pass, modelview, pixel size)
  * \param clock current animation time
  */
void SimpleTrajectoryGeometry::render(RenderContext& rc, double clock) const
{
    double t0 = firstSampleTime();
    double t1 = lastSampleTime();
    double fadeRate = 0.0;
    double fadeStartTime = 0.0;
    double fadeStartValue = 1.0;

    // Only draw during the appropriate render pass
    if ((rc.pass() == RenderContext::OpaquePass && !isOpaque()) ||
        (rc.pass() == RenderContext::TranslucentPass && isOpaque()))
    {
        return;
    }

    if (displayedPortion() == TrajectoryGeometry::WindowBeforeCurrentTime)
    {
        // Sliding window: show m_windowDuration of trajectory ending at
        // clock + m_windowLead, fading in over m_fadeFraction of the window.
        t0 = clock + m_windowLead - m_windowDuration;
        t1 = clock + m_windowLead;
        fadeStartTime = t0;
        fadeStartValue = 0.0;
        double fadeEndTime = fadeStartTime + m_windowDuration * m_fadeFraction;
        fadeRate = 1.0 / (fadeEndTime - fadeStartTime);
    }

    // Nothing to be drawn
    if (t1 <= t0)
    {
        return;
    }

    // Basic opacity of the plot. It may be modified based on three things:
    //   - Approximate size in pixels of the trajectory (small trajectories will fade out)
    //   - Distance from the camera to the 'front' (usually the current position of the orbiting body)
    //   - 'Age' of the trajectory: typically the most recent portions are drawn more opaque than the
    //     older parts. This is handled by setting per-vertex colors.
    float opacity = 0.99f * m_opacity;

    // Fade the whole plot out as its projected size shrinks below 30 pixels.
    const float sizeFadeStart = 30.0f;
    const float sizeFadeEnd = 15.0f;
    float pixelSize = boundingSphereRadius() / (rc.modelview().translation().norm() * rc.pixelSize());
    if (pixelSize < sizeFadeStart)
    {
        opacity *= std::max(0.0f, (pixelSize - sizeFadeEnd) / (sizeFadeStart - sizeFadeEnd));
    }

    if (opacity <= 0.0f)
    {
        // Complete fade out; no need to draw anything.
        return;
    }

    rc.pushModelView();
    if (m_frame.isValid())
    {
        rc.rotateModelView(m_frame->orientation(clock).cast<float>());
    }

    TrajectoryVertex vertex;
    vertex.color[0] = (unsigned char) (m_color.red() * 255.99f);
    vertex.color[1] = (unsigned char) (m_color.green() * 255.99f);
    vertex.color[2] = (unsigned char) (m_color.blue() * 255.99f);
    vertex.color[3] = 255;

    m_vertexData.clear();

    // Skip samples that precede the start of the visible window.
    unsigned int sampleIndex = 0;
    while (sampleIndex < m_samples.size() && t0 > m_samples[sampleIndex].timeTag)
    {
        ++sampleIndex;
    }

    // Interpolated start point at exactly t0 (when t0 falls between samples).
    if (sampleIndex > 0 && sampleIndex < m_samples.size())
    {
        double dt = m_samples[sampleIndex].timeTag - m_samples[sampleIndex - 1].timeTag;
        double t = (t0 - m_samples[sampleIndex - 1].timeTag) / dt;
        Vector3d interpolated = interpolateSamples(t, dt, m_samples[sampleIndex - 1], m_samples[sampleIndex]);
        float alpha = std::max(0.0f, std::min(1.0f, float(fadeStartValue + (t0 - fadeStartTime) * fadeRate)));
        vertex.position = interpolated.cast<float>();
        vertex.color[3] = (unsigned char) (alpha * 255.99f);
        m_vertexData.push_back(vertex);
    }

    // All samples strictly inside the window.
    while (sampleIndex < m_samples.size() && t1 > m_samples[sampleIndex].timeTag)
    {
        float alpha = std::max(0.0f, std::min(1.0f, float(fadeStartValue + (m_samples[sampleIndex].timeTag - fadeStartTime) * fadeRate)));
        vertex.position = m_samples[sampleIndex].position.cast<float>();
        vertex.color[3] = (unsigned char) (alpha * 255.99f);
        m_vertexData.push_back(vertex);
        ++sampleIndex;
    }

    // Interpolated end point at exactly t1.
    if (sampleIndex > 0 && sampleIndex < m_samples.size())
    {
        double dt = m_samples[sampleIndex].timeTag - m_samples[sampleIndex - 1].timeTag;
        double t = (t1 - m_samples[sampleIndex - 1].timeTag) / dt;
        Vector3d interpolated = interpolateSamples(t, dt, m_samples[sampleIndex - 1], m_samples[sampleIndex]);
        float alpha = std::max(0.0f, std::min(1.0f, float(fadeStartValue + (t1 - fadeStartTime) * fadeRate)));
        vertex.position = interpolated.cast<float>();
        vertex.color[3] = (unsigned char) (alpha * 255.99f);
        m_vertexData.push_back(vertex);
    }

    // Fade trajectory based on distance to front point. This is helpful because the simple trajectory model
    // is not precise, and fading hides the discrepancy between the plot and the body's current position.
    // FIX: only do this when at least one vertex was emitted; previously the
    // fade read 'vertex.position' even when no vertex had been assigned,
    // using uninitialized data (harmless only by luck, since nothing is
    // drawn with fewer than two vertices).
    if (!m_vertexData.empty())
    {
        // 'vertex' still holds the last point pushed, i.e. the front of the plot.
        Vector3f frontPosition = rc.modelview() * vertex.position;
        float frontDistance = frontPosition.norm();
        const float fadeStart = 0.04f;
        const float fadeFinish = 0.01f;
        if (frontDistance < fadeStart * boundingSphereRadius())
        {
            opacity *= std::max(0.0f, (frontDistance / boundingSphereRadius() - fadeFinish) / (fadeStart - fadeFinish));
        }
    }

    Material material;
    material.setDiffuse(Spectrum::White());
    material.setOpacity(opacity);
    rc.bindMaterial(&material);

    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    if (m_vertexData.size() > 1 && opacity > 0.0f)
    {
        rc.bindVertexArray(VertexSpec::PositionColor, m_vertexData[0].position.data(), sizeof(TrajectoryVertex));
        // A line strip with N vertices contains N-1 segments.
        rc.drawPrimitives(PrimitiveBatch(PrimitiveBatch::LineStrip, m_vertexData.size() - 1, 0));
        rc.unbindVertexArray();
    }

    glDisable(GL_BLEND);

    rc.popModelView();
}
/** Render the sky image as a textured sphere built from two quadtree-tessellated
  * hemispheres. The sphere is drawn inside-out (front-face culling) so that it
  * is visible from within.
  *
  * \param rc render context supplying the modelview stack, view frustum,
  *           and pixel size used to drive tessellation
  */
void SkyImageLayer::render(RenderContext& rc)
{
    // Don't render anything if the sky texture isn't resident
    if (m_texture.isNull() || m_texture->makeResident() == false)
    {
        return;
    }

    rc.pushModelView();
    rc.rotateModelView(m_orientation.cast<float>());

    // Get the position of the eye in model coordinates *before* scaling
    Transform3f invModelView = Transform3f(rc.modelview().inverse());
    Vector3f eyePosition = invModelView * Vector3f::Zero();

    // Compute the culling planes. Use the horizon distance for the far plane in order
    // to cull as many surface patches as possible.
    Frustum viewFrustum = rc.frustum();
    float farDistance = 2.0e6f;

    // Transforming plane coefficients by the transpose of the modelview
    // matrix moves the view-space planes into model space.
    Matrix4f modelviewTranspose = rc.modelview().matrix().transpose();
    CullingPlaneSet cullingPlanes;
    // The four side planes pass through the eye (offset 0).
    for (unsigned int i = 0; i < 4; ++i)
    {
        cullingPlanes.planes[i] = Hyperplane<float, 3>(viewFrustum.planeNormals[i].cast<float>(), 0.0f);
        cullingPlanes.planes[i].coeffs() = modelviewTranspose * cullingPlanes.planes[i].coeffs();
    }
    // Near and far planes.
    cullingPlanes.planes[4].coeffs() = modelviewTranspose * Vector4f(0.0f, 0.0f, -1.0f, -viewFrustum.nearZ);
    cullingPlanes.planes[5].coeffs() = modelviewTranspose * Vector4f(0.0f, 0.0f, 1.0f, farDistance);

    // Set the vertex information directly; we should change this so that
    // we're using a vertex array instead of immediate mode rendering.
    // Switch to unlit rendering by disabling surface normals
    // required for lighting.
    unsigned int tileFeatures = 0;

    Material material;
    material.setDiffuse(m_tintColor);
    material.setEmission(Spectrum::Black());
    material.setOpacity(m_opacity);
    material.setBaseTexture(m_texture.ptr());
    rc.bindMaterial(&material);

    // Create the root quadtree nodes. Presently, we always start with two root
    // tiles: one for the western hemisphere and one for the eastern hemisphere.
    // But, depending on what sort of tiles we have, a different set of root
    // tiles might be more appropriate.
    Vector3f semiAxes = Vector3f::Constant(1.0f);
    m_tileAllocator->clear();
    QuadtreeTile* westHemi = m_tileAllocator->newRootTile(0, 0, Vector2f(-1.0f, -0.5f), 1.0f, semiAxes);
    QuadtreeTile* eastHemi = m_tileAllocator->newRootTile(0, 1, Vector2f( 0.0f, -0.5f), 1.0f, semiAxes);

    // Set up the neighbor connections for the root nodes. Since the map wraps,
    // the eastern hemisphere is both the east and west neighbor of the western
    // hemisphere (and vice versa.) There are no north and south neighbors.
    westHemi->setNeighbor(QuadtreeTile::West, eastHemi);
    westHemi->setNeighbor(QuadtreeTile::East, eastHemi);
    eastHemi->setNeighbor(QuadtreeTile::West, westHemi);
    eastHemi->setNeighbor(QuadtreeTile::East, westHemi);

    // TODO: Consider map tile resolution when setting the split threshold
    float splitThreshold = rc.pixelSize() * MaxSkyImageTileSquareSize * QuadtreeTile::TileSubdivision;
    westHemi->tessellate(eyePosition, cullingPlanes, semiAxes, splitThreshold, rc.pixelSize());
    eastHemi->tessellate(eyePosition, cullingPlanes, semiAxes, splitThreshold, rc.pixelSize());

    // Cull front faces so the sphere is rendered from the inside; restore
    // the usual back-face culling afterwards.
    glCullFace(GL_FRONT);
    westHemi->render(rc, tileFeatures);
    eastHemi->render(rc, tileFeatures);
    glCullFace(GL_BACK);

    rc.popModelView();
}
/** Render all submeshes of the mesh.
  *
  * Each submesh is drawn either from its hardware vertex buffer (when one
  * was successfully created) or from a client-side vertex array. Redundant
  * material binds are skipped by tracking the last bound material index.
  *
  * \param rc render context used for binding buffers/materials and drawing
  */
void MeshGeometry::render(RenderContext& rc, double /* clock */) const
{
    // Lazily (re)build hardware vertex buffers before drawing.
    if (!m_hwBuffersCurrent)
    {
        realize();
    }

    // Track the last used material in order to avoid redundant
    // material bindings.
    unsigned int lastMaterialIndex = Submesh::DefaultMaterialIndex;

    rc.pushModelView();
    rc.scaleModelView(m_meshScale);

    // Render all submeshes
    GLVertexBuffer* boundVertexBuffer = NULL;
    for (unsigned int i = 0; i < m_submeshes.size(); ++i)
    {
        const Submesh& submesh = *m_submeshes[i];

        if (i < m_submeshBuffers.size() && m_submeshBuffers[i])
        {
            boundVertexBuffer = m_submeshBuffers[i];
            rc.bindVertexBuffer(submesh.vertices()->vertexSpec(), m_submeshBuffers[i], submesh.vertices()->stride());
        }
        else
        {
            // Falling back to a client-side vertex array: release any
            // still-bound hardware buffer first.
            if (boundVertexBuffer)
            {
                boundVertexBuffer->unbind();
                // FIX: was 'boundVertexBuffer = false;' — assigning a bool
                // to a pointer (ill-formed in C++11 and later).
                boundVertexBuffer = NULL;
            }
            rc.bindVertexArray(submesh.vertices());
        }

        const vector<PrimitiveBatch*>& batches = submesh.primitiveBatches();
        const vector<unsigned int>& materials = submesh.materials();
        assert(batches.size() == materials.size());

        // Render all batches in the submesh
        for (unsigned int j = 0; j < batches.size(); j++)
        {
            // If we have a new material, bind it
            unsigned int materialIndex = materials[j];
            if (materialIndex != lastMaterialIndex)
            {
                if (materialIndex < m_materials.size())
                {
                    rc.bindMaterial(m_materials[materialIndex].ptr());
                }
                lastMaterialIndex = materialIndex;
            }

            rc.drawPrimitives(*batches[j]);
        }
    }

    if (boundVertexBuffer)
    {
        boundVertexBuffer->unbind();
    }

    rc.popModelView();
}
/** Render the mesh into a shadow (depth-only) buffer.
  *
  * A single trivial material is bound once to avoid pixel shader work;
  * only depth values matter here. Mostly-transparent batches are skipped
  * so they don't cast shadows.
  *
  * \param rc render context used for binding buffers/materials and drawing
  */
void MeshGeometry::renderShadow(RenderContext& rc, double /* clock */) const
{
    // Lazily (re)build hardware vertex buffers before drawing.
    if (!m_hwBuffersCurrent)
    {
        realize();
    }

    // Use an extremely basic material to avoid wasting time
    // with pixel shader calculations when we're just interested
    // in depth values.
    Material simpleMaterial;
    rc.bindMaterial(&simpleMaterial);

    rc.pushModelView();
    rc.scaleModelView(m_meshScale);

    // Render all submeshes
    GLVertexBuffer* boundVertexBuffer = NULL;
    for (unsigned int i = 0; i < m_submeshes.size(); ++i)
    {
        const Submesh& submesh = *m_submeshes[i];

        if (i < m_submeshBuffers.size() && m_submeshBuffers[i])
        {
            boundVertexBuffer = m_submeshBuffers[i];
            rc.bindVertexBuffer(submesh.vertices()->vertexSpec(), m_submeshBuffers[i], submesh.vertices()->stride());
        }
        else
        {
            // Falling back to a client-side vertex array: release any
            // still-bound hardware buffer first.
            if (boundVertexBuffer)
            {
                boundVertexBuffer->unbind();
                // FIX: was 'boundVertexBuffer = false;' — assigning a bool
                // to a pointer (ill-formed in C++11 and later).
                boundVertexBuffer = NULL;
            }
            rc.bindVertexArray(submesh.vertices());
        }

        const vector<PrimitiveBatch*>& batches = submesh.primitiveBatches();
        const vector<unsigned int>& materials = submesh.materials();
        assert(batches.size() == materials.size());

        // Render all batches in the submesh
        for (unsigned int j = 0; j < batches.size(); j++)
        {
            // Skip mostly transparent items when drawing into the shadow
            // buffer.
            // TODO: Textures with transparent parts aren't handled here
            unsigned int materialIndex = materials[j];
            if (materialIndex >= m_materials.size() || m_materials[materialIndex]->opacity() > 0.5f)
            {
                rc.drawPrimitives(*batches[j]);
            }
        }
    }

    if (boundVertexBuffer)
    {
        boundVertexBuffer->unbind();
    }

    rc.popModelView();
}
void VectorMapLayer::renderTile(RenderContext& rc, const WorldGeometry* /* world */, const QuadtreeTile* tile) const { #ifndef VESTA_OGLES2 rc.setVertexInfo(VertexSpec::PositionColor); Material simpleMaterial; simpleMaterial.setDiffuse(Spectrum(1.0f, 1.0f, 1.0f)); simpleMaterial.setOpacity(1.0f); rc.bindMaterial(&simpleMaterial); float tileArc = float(PI) * tile->extent(); Vector2f southwest = tile->southwest(); SpherePatch box; box.west = float(PI) * southwest.x(); box.east = box.west + tileArc; box.south = float(PI) * southwest.y(); box.north = box.south + tileArc; AlignedBox<float, 2> bounds(Vector2f(box.west, box.south), Vector2f(box.east, box.north)); for (vector<counted_ptr<MapElement> >::const_iterator iter = m_elements.begin(); iter != m_elements.end(); ++iter) { const MapElement* element = iter->ptr(); bool tileContainsElement = false; if (element) { AlignedBox<float, 2> elementBox = element->bounds(); if (!elementBox.isNull()) { if (elementBox.min().x() < bounds.max().x() && elementBox.max().x() > bounds.min().x() && elementBox.min().y() < bounds.max().y() && elementBox.max().y() > bounds.min().y()) { tileContainsElement = true; } } } if (tileContainsElement) { Spectrum color = element->color(); glColor4f(color.red(), color.green(), color.blue(), element->opacity()); if (element->opacity() < 1.0f) { glEnable(GL_BLEND); glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); } else { glDisable(GL_BLEND); } element->render(box.west, box.south, box.east, box.north); } } glDisable(GL_BLEND); #endif }