// Finish the view pass for this frame: maintain the idle anti-aliasing
// accumulation buffer (clear on resize, accumulate/display when idle),
// draw the overlay/help/statistics, and tell the application how many
// idle AA steps are still pending via an IDLE_AA_LEFT event.
void Channel::frameViewFinish( const eq::uint128_t& frameID )
{
    if( stopRendering( ))
        return;

    applyBuffer();

    const FrameData& frameData = _getFrameData();
    // One accumulation state per eye pass.
    Accum& accum = _accum[ lunchbox::getIndexOfLastBit( getEye()) ];

    if( accum.buffer )
    {
        const eq::PixelViewport& pvp = getPixelViewport();
        const bool isResized = accum.buffer->resize( pvp );

        if( isResized )
        {
            // Viewport changed: restart idle anti-aliasing from scratch.
            const View* view = static_cast< const View* >( getView( ));
            accum.buffer->clear();
            accum.step = view->getIdleSteps();
            accum.stepsDone = 0;
        }
        else if( frameData.isIdle( ))
        {
            setupAssemblyState();

            // Accumulate only while more AA samples remain and the frame
            // contributed data; always display the current accumulation.
            if( !_isDone() && accum.transfer )
                accum.buffer->accum();
            accum.buffer->display();

            resetAssemblyState();
        }
    }

    applyViewport();
    _drawOverlay();
    _drawHelp();

    if( frameData.useStatistics())
        drawStatistics();

    int32_t steps = 0;
    if( frameData.isIdle( ))
    {
        // Remaining idle AA steps: the maximum over all eye passes.
        for( size_t i = 0; i < eq::NUM_EYES; ++i )
            steps = LB_MAX( steps, _accum[i].step );
    }
    else
    {
        const View* view = static_cast< const View* >( getView( ));
        steps = view ? view->getIdleSteps() : 0;
    }

    // if _jitterStep == 0 and no user redraw event happened, the app will exit
    // FSAA idle mode and block on the next redraw event.
    eq::Config* config = getConfig();
    config->sendEvent( IDLE_AA_LEFT ) << steps;

    eq::Channel::frameViewFinish( frameID );
}
void D3D11RenderAPI::setRenderTarget(const SPtr<RenderTargetCore>& target, bool readOnlyDepthStencil) { THROW_IF_NOT_CORE_THREAD; mActiveRenderTarget = target; UINT32 maxRenderTargets = mCurrentCapabilities->getNumMultiRenderTargets(); ID3D11RenderTargetView** views = bs_newN<ID3D11RenderTargetView*>(maxRenderTargets); memset(views, 0, sizeof(ID3D11RenderTargetView*) * maxRenderTargets); ID3D11DepthStencilView* depthStencilView = nullptr; if (target != nullptr) { target->getCustomAttribute("RTV", views); if(readOnlyDepthStencil) target->getCustomAttribute("RODSV", &depthStencilView); else target->getCustomAttribute("DSV", &depthStencilView); } // Bind render targets mDevice->getImmediateContext()->OMSetRenderTargets(maxRenderTargets, views, depthStencilView); if (mDevice->hasError()) BS_EXCEPT(RenderingAPIException, "Failed to setRenderTarget : " + mDevice->getErrorDescription()); bs_deleteN(views, maxRenderTargets); applyViewport(); BS_INC_RENDER_STAT(NumRenderTargetChanges); }
// Stores the requested viewport rectangle and immediately re-applies it.
// NOTE(review): the member name suggests the rect is normalized against the
// bound render target's size (applyViewport resolves it) — confirm in
// applyViewport's definition.
void D3D11RenderAPI::setViewport(const Rect2& vp)
{
    THROW_IF_NOT_CORE_THREAD;

    mViewportNorm = vp;
    applyViewport();
}
// Clear the channel's color and depth buffers for a new frame, choosing a
// background color that identifies the active view (white), an optional
// per-channel debug taint, or black otherwise.
void Channel::frameClear( const eq::uint128_t& /*frameID*/ )
{
    if( stopRendering( ))
        return;

    _initJitter();
    resetRegions();

    const FrameData& frameData = _getFrameData();
    const int32_t eyeIndex = lunchbox::getIndexOfLastBit( getEye() );
    // Skip the clear when idle AA has converged and no transfer is pending.
    if( _isDone() && !_accum[ eyeIndex ].transfer )
        return;

    applyBuffer();
    applyViewport();

    const eq::View* view = getView();
    if( view && frameData.getCurrentViewID() == view->getID( ))
        glClearColor( 1.f, 1.f, 1.f, 0.f );   // active view: white background
#ifndef NDEBUG
    else if( getenv( "EQ_TAINT_CHANNELS" ))
    {
        // Debug aid: tint each channel with its unique color.
        const eq::Vector3ub color = getUniqueColor();
        glClearColor( color.r()/255.f, color.g()/255.f, color.b()/255.f, 0.f );
    }
#endif // NDEBUG
    else
        glClearColor( 0.f, 0.f, 0.f, 0.0f );

    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
}
// Draw pass: sets up the standard GL projection/modelview state, then runs
// a battery of compositing checks (names suggest image-format, tiled-op and
// depth-assembly tests — confirm in the helpers' definitions).
void Channel::frameDraw( const eq::uint128_t& frameID )
{
    //----- setup GL state
    applyBuffer();
    applyViewport();

    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    applyFrustum();

    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();
    applyHeadTransform();

    setupAssemblyState();

    // Format tests at identity, down- and up-scaling factors.
    _testFormats( 1.0f );
    _testFormats( 0.5f );
    _testFormats( 2.0f );
    _testTiledOperations();
    _testDepthAssemble();

    resetAssemblyState();
}
void qAnimationDlg::preview() { //we'll take the rendering time into account! QElapsedTimer timer; timer.start(); setEnabled(false); //reset the interpolators and count the total number of frames int frameCount = countFrameAndResetInterpolators(); //show progress dialog QProgressDialog progressDialog(QString("Frames: %1").arg(frameCount), "Cancel", 0, frameCount, this); progressDialog.setWindowTitle("Preview"); progressDialog.show(); QApplication::processEvents(); double fps = fpsSpinBox->value(); int frameIndex = 0; for ( size_t i=0; i<m_videoSteps.size(); ++i ) { VideoStepItem& currentVideoStep = m_videoSteps[i]; //theoretical waiting time per frame qint64 delay_ms = static_cast<int>(1000 * currentVideoStep.duration_sec / fps); cc2DViewportObject currentParams; while ( currentVideoStep.interpolator.nextView( currentParams ) ) { timer.restart(); applyViewport ( ¤tParams ); qint64 dt_ms = timer.elapsed(); progressDialog.setValue(++frameIndex); QApplication::processEvents(); if (progressDialog.wasCanceled()) { break; } //remaining time if (dt_ms < delay_ms) { int wait_ms = static_cast<int>(delay_ms - dt_ms); #if defined(CC_WINDOWS) ::Sleep( wait_ms ); #else usleep( wait_ms * 1000 ); #endif } } } //reset view onCurrentStepChanged( getCurrentStepIndex() ); setEnabled(true); }
void qAnimationDlg::onCurrentStepChanged(int index) { //update current step descriptor stepIndexLabel->setText(QString::number(index+1)); updateCurrentStepDuration(); applyViewport( m_videoSteps[index].interpolator.view1() ); }
// Assemble input frames into this channel, tracking idle anti-aliasing
// progress. Falls back to the base-class assembly (and disables idle AA
// accumulation) when the current viewport does not match the view viewport.
void Channel::frameAssemble( const eq::uint128_t& frameID )
{
    if( stopRendering( ))
        return;

    if( _isDone( ))
        return;

    Accum& accum = _accum[ lunchbox::getIndexOfLastBit( getEye()) ];

    if( getPixelViewport() != _currentPVP )
    {
        accum.transfer = true;

        if( accum.buffer && !accum.buffer->usesFBO( ))
        {
            LBWARN << "Current viewport different from view viewport, "
                   << "idle anti-aliasing not implemented." << std::endl;
            accum.step = 0;
        }

        eq::Channel::frameAssemble( frameID );
        return;
    }
    // else

    accum.transfer = true;
    const eq::Frames& frames = getInputFrames();

    for( eq::Frames::const_iterator i = frames.begin(); i != frames.end(); ++i )
    {
        eq::Frame* frame = *i;
        const eq::SubPixel& curSubPixel = frame->getSubPixel();

        // A partial sub-pixel frame means this assembly is incomplete;
        // don't transfer the accumulation yet.
        if( curSubPixel != eq::SubPixel::ALL )
            accum.transfer = false;

        accum.stepsDone = LB_MAX( accum.stepsDone,
                                  frame->getSubPixel().size*frame->getPeriod( ));
    }

    applyBuffer();
    applyViewport();
    setupAssemblyState();

    try
    {
        eq::Compositor::assembleFrames( getInputFrames(), this, accum.buffer );
    }
    catch( const co::Exception& e )
    {
        // Best-effort: log and continue; a failed assembly must not abort
        // the frame.
        LBWARN << e.what() << std::endl;
    }

    resetAssemblyState();
}
// Assemble the given input frames into this channel (newer API taking the
// frame list explicitly), tracking idle anti-aliasing progress. Falls back
// to base-class assembly when the viewport does not match the view viewport.
void Channel::frameAssemble( const eq::uint128_t& frameID,
                             const eq::Frames& frames )
{
    if( stopRendering( ))
        return;

    if( _isDone( ))
        return;

    Accum& accum = _accum[ lunchbox::getIndexOfLastBit( getEye()) ];

    if( getPixelViewport() != _currentPVP )
    {
        accum.transfer = true;

        if( accum.buffer && !accum.buffer->usesFBO( ))
        {
            LBWARN << "Current viewport different from view viewport, "
                   << "idle anti-aliasing not implemented." << std::endl;
            accum.step = 0;
        }

        eq::Channel::frameAssemble( frameID, frames );
        return;
    }
    // else

    accum.transfer = true;
    for( eq::Frame* frame : frames )
    {
        const eq::SubPixel& subPixel =
            frame->getFrameData()->getContext().subPixel;

        // A partial sub-pixel frame means this assembly is incomplete;
        // don't transfer the accumulation yet.
        if( subPixel != eq::SubPixel::ALL )
            accum.transfer = false;

        accum.stepsDone =
            LB_MAX( accum.stepsDone,
                    subPixel.size * frame->getFrameData()->getContext().period );
    }

    applyBuffer();
    applyViewport();
    setupAssemblyState();

    try
    {
        eq::Compositor::assembleFrames( frames, this, accum.buffer.get( ));
    }
    catch( const co::Exception& e )
    {
        // Best-effort: log and continue; a failed assembly must not abort
        // the frame.
        LBWARN << e.what() << std::endl;
    }

    resetAssemblyState();
}
void Channel::frameViewFinish( const uint128_t& frameID ) { const ViewData* data = getViewData(); EQASSERT( data ); if( !data || !data->getStatistics( )) return; applyBuffer(); applyViewport(); drawStatistics(); }
// Clear the channel's color buffer. A channel rendering the full database
// range clears to the configured background color; a partial range clears
// to black.
void Channel::frameClear( const eq::uint128_t& frameID )
{
    applyBuffer();
    applyViewport();

    const bool fullRange = ( getRange() == eq::Range::ALL );
    if( fullRange )
        glClearColor( _bgColor.r(), _bgColor.g(), _bgColor.b(), 1.0f );
    else
        glClearColor( 0.0f, 0.0f, 0.0f, 1.0f );

    glClear( GL_COLOR_BUFFER_BIT );
}
// Overlay the statistics display when the view requests it, then let the
// base class finish the view pass (always called, overlay or not).
void Channel::frameViewFinish(const uint128_t& frameID)
{
    const ViewData* data = getViewData();
    LBASSERT(data);

    if (data && data->getStatistics())
    {
        applyBuffer();
        applyViewport();
        drawStatistics();
    }
    eq::Channel::frameViewFinish(frameID);
}
// Draw pass: sets up the frustum and lighting, applies the head transform
// and model transform, then delegates rendering of this channel's database
// range to the pipe's renderer.
void Channel::frameDraw( const eq::uint128_t& )
{
    // Setup frustum
    EQ_GL_CALL( applyBuffer( ));
    EQ_GL_CALL( applyViewport( ));

    EQ_GL_CALL( glMatrixMode( GL_PROJECTION ));
    EQ_GL_CALL( glLoadIdentity( ));
    EQ_GL_CALL( applyFrustum( ));

    EQ_GL_CALL( glMatrixMode( GL_MODELVIEW ));
    EQ_GL_CALL( glLoadIdentity( ));

    // Setup lights before applying head transform, so the light will be
    // consistent in the cave
    const FrameData& frameData = _getFrameData();
    const eq::Matrix4f& rotation = frameData.getRotation();
    const eq::Vector3f& translation = frameData.getTranslation();

    eq::Matrix4f invRotationM;
    rotation.inverse( invRotationM );
    setLights( invRotationM );

    EQ_GL_CALL( applyHeadTransform( ));

    glTranslatef( translation.x(), translation.y(), translation.z() );
    glMultMatrixf( rotation.array );

    Pipe* pipe = static_cast<Pipe*>( getPipe( ));
    Renderer* renderer = pipe->getRenderer();
    LBASSERT( renderer );

    const eq::Matrix4f& modelview = _computeModelView();

    // set fancy data colors
    const eq::Vector4f taintColor = _getTaintColor( frameData.getColorMode(),
                                                    getUniqueColor( ));
    const int normalsQuality = _getFrameData().getNormalsQuality();

    const eq::Range& range = getRange();
    renderer->render( range, modelview, invRotationM, taintColor,
                      normalsQuality );
    checkError( "error during rendering " );

    // Remember what was drawn; used later (e.g. for frame readback/regions).
    _drawRange = range;

#ifndef NDEBUG
    outlineViewport();
#endif
}
void qAnimationDlg::onCurrentStepChanged(int index) { //update current step descriptor stepIndexLabel->setText(QString::number(index+1)); updateCurrentStepDuration(); applyViewport( m_videoSteps[index].viewport ); //check that the step is enabled bool isEnabled = (stepSelectionList->item(index)->checkState() == Qt::Checked); bool isLoop = loopCheckBox->isChecked(); currentStepGroupBox->setEnabled(isEnabled && (index+1 < m_videoSteps.size() || isLoop)); }
/*---------------------------------------------------------------------*//**
	Set the real (physical) resolution.

	@param width       Physical width in pixels.
	@param height      Physical height in pixels (assumed non-zero; used
	                   as divisor — TODO confirm callers guarantee this).
	@param setViewport When true, re-apply the viewport immediately.
**//*---------------------------------------------------------------------*/
void View::setRealSize(f32 width, f32 height, bool setViewport)
{
	_widthCur = width;
	_heightCur = height;
	_aspectCur = _widthCur / _heightCur;
	_aspectCurInv = 1.0f / _aspectCur;

	// If the logical size has not been set yet, initialize it too
	if ((_widthLgc == 0) || (_heightLgc == 0))	// logical size unset
	{
		_widthLgc = width;
		_heightLgc = height;
	}

	if (setViewport)
	{
		applyViewport();
	}
}
// Draw pass: sets up the frustum and lighting, applies the head transform
// and model transform, then delegates rendering of this channel's database
// range to the pipe's renderer (modelview + inverse-transpose variant).
void Channel::frameDraw( const eq::uint128_t& frameID )
{
    // Setup frustum
    EQ_GL_CALL( applyBuffer( ));
    EQ_GL_CALL( applyViewport( ));

    EQ_GL_CALL( glMatrixMode( GL_PROJECTION ));
    EQ_GL_CALL( glLoadIdentity( ));
    EQ_GL_CALL( applyFrustum( ));

    EQ_GL_CALL( glMatrixMode( GL_MODELVIEW ));
    EQ_GL_CALL( glLoadIdentity( ));

    // Setup lights before applying head transform, so the light will be
    // consistent in the cave
    const FrameData& frameData = _getFrameData();
    const eq::Matrix4f& rotation = frameData.getRotation();
    const eq::Vector3f& translation = frameData.getTranslation();

    eq::Matrix4f invRotationM;
    rotation.inverse( invRotationM );
    setLights( invRotationM );

    EQ_GL_CALL( applyHeadTransform( ));

    glTranslatef( translation.x(), translation.y(), translation.z() );
    glMultMatrixf( rotation.array );

    Pipe* pipe = static_cast<Pipe*>( getPipe( ));
    Renderer* renderer = pipe->getRenderer();
    EQASSERT( renderer );

    eq::Matrix4f  modelviewM;     // modelview matrix
    eq::Matrix3f  modelviewITM;   // modelview inversed transposed matrix
    _calcMVandITMV( modelviewM, modelviewITM );

    const eq::Range& range = getRange();
    renderer->render( range, modelviewM, modelviewITM, invRotationM );
    checkError( "error during rendering " );

    // Remember what was drawn; used later (e.g. for frame readback/regions).
    _drawRange = range;

#ifndef NDEBUG
    outlineViewport();
#endif
}
/*---------------------------------------------------------------------*//**
	Set the logical resolution.

	@param width       Logical width.
	@param height      Logical height (assumed non-zero; used as divisor —
	                   TODO confirm callers guarantee this).
	@param setViewport When true, re-apply the viewport immediately.

	NOTE(review): '_acpectLgc' is a long-standing spelling of "aspect" in
	the member names; kept as declared elsewhere in the class.
**//*---------------------------------------------------------------------*/
void View::setLogicalSize(f32 width, f32 height, bool setViewport)
{
	// Store the logical size and derived aspect ratios
	_widthLgc = width;
	_heightLgc = height;
	_acpectLgc = _widthLgc / _heightLgc;
	_acpectLgcInv = 1.0f / _acpectLgc;

	// If the real resolution has not been set yet, initialize it too
	if ((_widthCur == 0) || (_heightCur == 0))	// viewport size unset
	{
		_widthCur = width;
		_heightCur = height;
	}

	if (setViewport)
	{
		applyViewport();
	}
}
TFW_BEGIN_NS

////////////////////////////////////////////////////////////////////////////

//==========================================================================
// View methods

// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// External services

/*---------------------------------------------------------------------*//**
	Set the view mode.

	@param vmode       New view mode.
	@param setViewport When true, re-apply the viewport immediately.
**//*---------------------------------------------------------------------*/
void View::setViewMode(ViewMode vmode, bool setViewport)
{
	_vmode = vmode;

	if (setViewport)
	{
		applyViewport();
	}
}
void qAnimationDlg::preview() { //we'll take the rendering time into account! QElapsedTimer timer; timer.start(); setEnabled(false); size_t vp1 = previewFromSelectedCheckBox->isChecked() ? static_cast<size_t>(getCurrentStepIndex()) : 0; //count the total number of frames int frameCount = countFrames(loopCheckBox->isChecked() ? 0 : vp1); int fps = fpsSpinBox->value(); //show progress dialog QProgressDialog progressDialog(QString("Frames: %1").arg(frameCount), "Cancel", 0, frameCount, this); progressDialog.setWindowTitle("Preview"); progressDialog.show(); progressDialog.setModal(true); progressDialog.setAutoClose(false); QApplication::processEvents(); assert(stepSelectionList->count() >= m_videoSteps.size()); int frameIndex = 0; size_t vp2 = 0; while (getNextSegment(vp1, vp2)) { Step& step1 = m_videoSteps[vp1]; Step& step2 = m_videoSteps[vp2]; //theoretical waiting time per frame qint64 delay_ms = static_cast<int>(1000 * step1.duration_sec / fps); int frameCount = static_cast<int>( fps * step1.duration_sec ); ViewInterpolate interpolator(step1.viewport, step2.viewport); interpolator.setMaxStep(frameCount); cc2DViewportObject currentParams; while ( interpolator.nextView( currentParams ) ) { timer.restart(); applyViewport ( ¤tParams ); qint64 dt_ms = timer.elapsed(); progressDialog.setValue(++frameIndex); QApplication::processEvents(); if (progressDialog.wasCanceled()) { break; } //remaining time if (dt_ms < delay_ms) { int wait_ms = static_cast<int>(delay_ms - dt_ms); #if defined(CC_WINDOWS) ::Sleep( wait_ms ); #else usleep( wait_ms * 1000 ); #endif } } if (progressDialog.wasCanceled()) { break; } if (vp2 == 0) { assert(loopCheckBox->isChecked()); frameIndex = 0; } vp1 = vp2; } //reset view onCurrentStepChanged(getCurrentStepIndex()); setEnabled(true); }
// Apply the camera for rendering: latch the transform-changed flag into
// _viewProjectionUpdated (presumably consumed by code that refreshes a
// cached view-projection matrix — confirm at the readers), then bind the
// framebuffer object and viewport.
void Camera::apply()
{
	_viewProjectionUpdated = _transformUpdated;

	applyFrameBufferObject();

	applyViewport();
}
void qAnimationDlg::render() { QString outputFilename = outputFileLineEdit->text(); //save to persistent settings { QSettings settings; settings.beginGroup("qAnimation"); settings.setValue("filename", outputFilename); settings.endGroup(); } setEnabled(false); //reset the interpolators and count the total number of frames int frameCount = countFrameAndResetInterpolators(); //show progress dialog QProgressDialog progressDialog(QString("Frames: %1").arg(frameCount), "Cancel", 0, frameCount, this); progressDialog.setWindowTitle("Render"); progressDialog.show(); QApplication::processEvents(); #ifdef QFFMPEG_SUPPORT //get original viewport size QSize originalViewSize = m_view3d->size(); //hack: as the encoder requires that the video dimensions are multiples of 8, we resize the window a little bit... { //find the nearest multiples of 8 QSize customSize = originalViewSize; if (originalViewSize.width() % 8 || originalViewSize.height() % 8) { if (originalViewSize.width() % 8) customSize.setWidth((originalViewSize.width() / 8 + 1) * 8); if (originalViewSize.height() % 8) customSize.setHeight((originalViewSize.height() / 8 + 1) * 8); m_view3d->resize(customSize); QApplication::processEvents(); } } int bitrate = bitrateSpinBox->value(); int gop = 12; QVideoEncoder encoder(outputFilename, m_view3d->width(), m_view3d->height(), bitrate, gop, static_cast<unsigned>(fpsSpinBox->value())); QString errorString; if (!encoder.open(&errorString)) { QMessageBox::critical(this, "Error", QString("Failed to open file for output: %1").arg(errorString)); setEnabled(true); return; } #endif int frameIndex = 0; bool success = true; for ( size_t i=0; i<m_videoSteps.size(); ++i ) { VideoStepItem& currentVideoStep = m_videoSteps[i]; cc2DViewportObject current_params; while ( currentVideoStep.interpolator.nextView( current_params ) ) { applyViewport ( ¤t_params ); //render to image QImage image = m_view3d->renderToImage(1.0 , true, false, true ); ++frameIndex; if (image.isNull()) { 
QMessageBox::critical(this, "Error", "Failed to grab the screen!"); success = false; break; } #ifdef QFFMPEG_SUPPORT if (!encoder.encodeImage(image, &errorString)) { QMessageBox::critical(this, "Error", QString("Failed to encode frame #%1: %2").arg(frameIndex).arg(errorString)); success = false; break; } #else QString filename = QString("frame_%1.png").arg(frameIndex,6,10,QChar('0')); QString fullPath = QDir(outputFilename).filePath(filename); if (!image.save(fullPath)) { QMessageBox::critical(this, "Error", QString("Failed to save frame #%1").arg(frameIndex)); success = false; break; } #endif progressDialog.setValue(frameIndex); QApplication::processEvents(); if (progressDialog.wasCanceled()) { QMessageBox::warning(this, "Warning", QString("Process has been cancelled")); success = false; break; } } if (!success) { break; } } #ifdef QFFMPEG_SUPPORT encoder.close(); //hack: restore original size m_view3d->resize(originalViewSize); QApplication::processEvents(); #endif progressDialog.hide(); QApplication::processEvents(); if (success) { QMessageBox::information(this, "Job done", "The animation has been saved successfully"); } setEnabled(true); }
// Common setup shared by the assembly passes: bind the draw buffer, apply
// the viewport and push the channel's assembly GL state.
void Channel::_startAssemble()
{
    applyBuffer();
    applyViewport();
    setupAssemblyState();
}
// Execute one draw call: apply the pipeline state, bind program, textures,
// samplers, framebuffer, uniforms and uniform buffers, then dispatch the
// appropriate glDraw* variant (indexed/instanced as needed).
void DrawExecution::perform()
{
    m_glStateManager.enableTextureCubeMapSeamless(true);

    // Apply State
    applyDepthState();
    applyBlendState();
    applyCullState();
    applyRasterizerState();
    applyStencilState();
    applyViewport();

    gl::glUseProgram(m_drawImpl.program->glProgramName);

    // Setup texture units
    for (auto b = 0u; b < m_drawImpl.samplers.size(); b++)
    {
        auto & sampler = m_drawImpl.samplers[b];
        auto * texture = sampler.texture;

        Assert(texture, "");

        // Lazily create the GL texture object on first use.
        if (texture->glName == 0)
        {
            texture->allocate();
        }

        gl::glActiveTexture(gl::GL_TEXTURE0 + b);
        gl::glBindTexture(texture->type, texture->glName);
        gl::glTexParameteri(texture->type, gl::GL_TEXTURE_BASE_LEVEL, texture->baseLevel);
        gl::glTexParameteri(texture->type, gl::GL_TEXTURE_MAX_LEVEL, texture->maxLevel);

        gl::glBindSampler(b, sampler.glSampler.name());
        gl::glSamplerParameteri(sampler.glSampler.name(), gl::GL_TEXTURE_MIN_FILTER, (gl::GLint)texture->minFilter);
        gl::glSamplerParameteri(sampler.glSampler.name(), gl::GL_TEXTURE_MAG_FILTER, (gl::GLint)texture->maxFilter);
        gl::glUniform1i(sampler.location, b);
    }

    // Setup RenderTarget / Framebuffer
    Assert(m_drawImpl.framebuffer.m_impl.get(), "");
    m_drawImpl.framebuffer.m_impl->bind(m_glStateManager);

    // Set uniforms
    {
        /*
            TODO
                Port to GLStateManager
        */
        for (auto & uniform : m_drawImpl.uniforms)
        {
            Assert(uniform.isAssigned, "Uniform " + m_drawImpl.program->interface.uniformByLocation(uniform.location)->name() + " not set");
            Assert(uniform.count > 0, "");

            auto count = uniform.count;
            auto * data = uniform.blob.ptr();
            auto location = uniform.location;

            switch (TypeToGLType(uniform.type))
            {
            case gl::GL_INT:
                gl::glUniform1iv(location, count, ((const gl::GLint*)data));
                break;
            case gl::GL_UNSIGNED_INT:
                gl::glUniform1uiv(location, count, ((const gl::GLuint*)data));
                break;
            case gl::GL_INT_VEC2:
            {
                auto idata = (const gl::GLint*)data;
                gl::glUniform2iv(location, count, idata);
                break;
            }
            case gl::GL_FLOAT:
                gl::glUniform1fv(location, count, ((const gl::GLfloat*)data));
                break;
            case gl::GL_FLOAT_VEC2:
            {
                auto fdata = (const gl::GLfloat*)data;
                gl::glUniform2fv(location, count, fdata);
                break;
            }
            case gl::GL_FLOAT_VEC3:
            {
                auto fdata = (const gl::GLfloat*)data;
                gl::glUniform3fv(location, count, fdata);
                break;
            }
            case gl::GL_FLOAT_VEC4:
            {
                auto fdata = (const gl::GLfloat*)data;
                gl::glUniform4fv(location, count, fdata);
                break;
            }
            case gl::GL_FLOAT_MAT4:
                gl::glUniformMatrix4fv(location, count, gl::GL_FALSE, (const gl::GLfloat*)data);
                break;
            default:
                Fail(std::string("Not implemented for type ") + uniform.type.name());
            }
        }
    }

    // Set uniform buffers
    {
        // NOTE(review): 'auto b = 0' deduces int while .size() is unsigned —
        // compiles with a sign-compare warning; harmless for small counts.
        for (auto b = 0; b < m_drawImpl.uniformBuffers.size(); b++)
        {
            auto & binding = m_drawImpl.uniformBuffers[b];

            Assert(binding.engaged(), "UniformBuffer " + m_drawImpl.program->interface.uniformBlocks()[b].name() + " not bound");

            auto & buffer = *binding.get().buffer;

            auto size = buffer.count * buffer.layout.stride();
            Assert(size > binding.get().begin, "begin beyond buffer bounds");

            gl::glUniformBlockBinding(m_drawImpl.program->glProgramName, b, b);

            // NOTE(review): the bound range is one stride, not the full
            // 'size' computed above — looks like a deliberate per-element
            // binding at offset 'begin', but verify against callers.
            gl::glBindBufferRange(gl::GL_UNIFORM_BUFFER, b, buffer.glName, binding.get().begin, buffer.layout.stride());
        }
    }

    // Dispatch draw
    if (m_drawImpl.indexBuffer)
    {
        if (!m_drawImpl.instanceBuffers.empty())
        {
            drawElementsInstanced();
        }
        else
        {
            drawElements();
        }
    }
    else
    {
        if (!m_drawImpl.instanceBuffers.empty())
        {
            drawArraysInstanced();
        }
        else
        {
            drawArrays();
        }
    }
}
// Draw the help text (small font, fixed position) and/or the current
// message (medium font, vertically centered) as a screen-space overlay.
void Channel::_drawHelp()
{
    const FrameData& frameData = _getFrameData();
    std::string message = frameData.getMessage();

    if( !frameData.showHelp() && message.empty( ))
        return;

    applyBuffer();
    applyViewport();
    setupAssemblyState();

    glDisable( GL_LIGHTING );
    glDisable( GL_DEPTH_TEST );
    glColor3f( 1.f, 1.f, 1.f );

    if( frameData.showHelp( ))
    {
        const eq::Window::Font* font = getWindow()->getSmallFont();
        std::string help = EVolve::getHelp();
        float y = 340.f;

        // Render the help string line by line, top to bottom.
        for( size_t pos = help.find( '\n' ); pos != std::string::npos;
             pos = help.find( '\n' ))
        {
            glRasterPos3f( 10.f, y, 0.99f );

            font->draw( help.substr( 0, pos ));
            help = help.substr( pos + 1 );
            y -= 16.f;
        }
        // last line
        glRasterPos3f( 10.f, y, 0.99f );
        font->draw( help );
    }

    if( !message.empty( ))
    {
        const eq::Window::Font* font = getWindow()->getMediumFont();

        // Position the message relative to the full view, compensating for
        // this channel's sub-viewport offset.
        const eq::Viewport& vp = getViewport();
        const eq::PixelViewport& pvp = getPixelViewport();

        const float width = pvp.w / vp.w;
        const float xOffset = vp.x * width;

        const float height = pvp.h / vp.h;
        const float yOffset = vp.y * height;
        const float yMiddle = 0.5f * height;
        float y = yMiddle - yOffset;

        // Render the message line by line, vertically centered.
        for( size_t pos = message.find( '\n' ); pos != std::string::npos;
             pos = message.find( '\n' ))
        {
            glRasterPos3f( 10.f - xOffset, y, 0.99f );

            font->draw( message.substr( 0, pos ));
            message = message.substr( pos + 1 );
            y -= 22.f;
        }
        // last line
        glRasterPos3f( 10.f - xOffset, y, 0.99f );
        font->draw( message );
    }

    EQ_GL_CALL( resetAssemblyState( ));
}
/*---------------------------------------------------------------------*//**
	Set the display flags.
	(The original header comment said "set the view mode" — a copy-paste
	from setViewMode; corrected here.)

	@param dispflags   New display flag bits.
	@param setViewport When true, re-apply the viewport immediately.
**//*---------------------------------------------------------------------*/
void View::setDisplayFlags(u32 dispflags, bool setViewport)
{
	_dispflags = dispflags;

	if (setViewport)
	{
		applyViewport();
	}
}
// Apply the camera for rendering: bind its framebuffer object, then its
// viewport.
void Camera::apply()
{
	applyFrameBufferObject();

	applyViewport();
}
void qAnimationDlg::render() { if (!m_view3d) { assert(false); return; } QString outputFilename = outputFileLineEdit->text(); //save to persistent settings { QSettings settings; settings.beginGroup("qAnimation"); settings.setValue("filename", outputFilename); settings.endGroup(); } setEnabled(false); //count the total number of frames int frameCount = countFrames(0); int fps = fpsSpinBox->value(); int superRes = superResolutionSpinBox->value(); //show progress dialog QProgressDialog progressDialog(QString("Frames: %1").arg(frameCount), "Cancel", 0, frameCount, this); progressDialog.setWindowTitle("Render"); progressDialog.show(); QApplication::processEvents(); #ifdef QFFMPEG_SUPPORT //get original viewport size QSize originalViewSize = m_view3d->size(); //hack: as the encoder requires that the video dimensions are multiples of 8, we resize the window a little bit... { //find the nearest multiples of 8 QSize customSize = originalViewSize; if (originalViewSize.width() % 8 || originalViewSize.height() % 8) { if (originalViewSize.width() % 8) customSize.setWidth((originalViewSize.width() / 8 + 1) * 8); if (originalViewSize.height() % 8) customSize.setHeight((originalViewSize.height() / 8 + 1) * 8); m_view3d->resize(customSize); QApplication::processEvents(); } } int bitrate = bitrateSpinBox->value() * 1024; int gop = fps; QVideoEncoder encoder(outputFilename, m_view3d->width(), m_view3d->height(), bitrate, gop, static_cast<unsigned>(fpsSpinBox->value())); QString errorString; if (!encoder.open(&errorString)) { QMessageBox::critical(this, "Error", QString("Failed to open file for output: %1").arg(errorString)); setEnabled(true); return; } #endif bool lodWasEnabled = m_view3d->isLODEnabled(); m_view3d->setLODEnabled(false); int frameIndex = 0; bool success = true; size_t vp1 = 0, vp2 = 0; while (getNextSegment(vp1, vp2)) { Step& step1 = m_videoSteps[vp1]; Step& step2 = m_videoSteps[vp2]; ViewInterpolate interpolator(step1.viewport, step2.viewport); int frameCount = 
static_cast<int>( fps * step1.duration_sec ); interpolator.setMaxStep(frameCount); cc2DViewportObject current_params; while ( interpolator.nextView( current_params ) ) { applyViewport ( ¤t_params ); //render to image QImage image = m_view3d->renderToImage(superRes, false, false, true ); if (image.isNull()) { QMessageBox::critical(this, "Error", "Failed to grab the screen!"); success = false; break; } if (superRes > 1) { image = image.scaled(image.width()/superRes, image.height()/superRes, Qt::IgnoreAspectRatio, Qt::SmoothTransformation); } #ifdef QFFMPEG_SUPPORT if (!encoder.encodeImage(image, frameIndex, &errorString)) { QMessageBox::critical(this, "Error", QString("Failed to encode frame #%1: %2").arg(frameIndex+1).arg(errorString)); success = false; break; } #else QString filename = QString("frame_%1.png").arg(frameIndex, 6, 10, QChar('0')); QString fullPath = QDir(outputFilename).filePath(filename); if (!image.save(fullPath)) { QMessageBox::critical(this, "Error", QString("Failed to save frame #%1").arg(frameIndex+1)); success = false; break; } #endif ++frameIndex; progressDialog.setValue(frameIndex); QApplication::processEvents(); if (progressDialog.wasCanceled()) { QMessageBox::warning(this, "Warning", QString("Process has been cancelled")); success = false; break; } } if (!success) { break; } if (vp2 == 0) { //stop loop here! break; } vp1 = vp2; } m_view3d->setLODEnabled(lodWasEnabled); #ifdef QFFMPEG_SUPPORT encoder.close(); //hack: restore original size m_view3d->resize(originalViewSize); QApplication::processEvents(); #endif progressDialog.hide(); QApplication::processEvents(); if (success) { QMessageBox::information(this, "Job done", "The animation has been saved successfully"); } setEnabled(true); }
// Convenience overload: transforms the vertex using this camera's own
// viewport (delegates to the two-argument overload).
const vgm::Vec3f Camera::applyViewport( const vgm::Vec3f& vertex )
{
	return applyViewport( getViewport(), vertex );
}
// Draw the message (medium font, XOR-blended so it stays visible on any
// background) in channel coordinates, then switch to a screen frustum and
// draw the help text (small font) from the top of the screen downwards.
void Channel::_drawHelp()
{
    const FrameData& frameData = _getFrameData();
    std::string message = frameData.getMessage();

    if( !frameData.showHelp() && message.empty( ))
        return;

    applyBuffer();
    applyViewport();
    setupAssemblyState();

    // XOR logic op makes the overlay readable over arbitrary content.
    glLogicOp( GL_XOR );
    glEnable( GL_COLOR_LOGIC_OP );
    glDisable( GL_LIGHTING );
    glDisable( GL_DEPTH_TEST );

    glColor3f( 1.f, 1.f, 1.f );

    const eq::PixelViewport& pvp = getPixelViewport();
    const eq::Viewport& vp = getViewport();
    const float height = pvp.h / vp.h;

    if( !message.empty( ))
    {
        const eq::util::BitmapFont* font = getWindow()->getMediumFont();

        // Position relative to the full view, compensating for this
        // channel's sub-viewport offset; golden-ratio vertical anchor.
        const float width = pvp.w / vp.w;
        const float xOffset = vp.x * width;

        const float yOffset = vp.y * height;
        const float yPos = 0.618f * height;
        float y = yPos - yOffset;

        for( size_t pos = message.find( '\n' ); pos != std::string::npos;
             pos = message.find( '\n' ))
        {
            glRasterPos3f( 10.f - xOffset, y, 0.99f );

            font->draw( message.substr( 0, pos ));
            message = message.substr( pos + 1 );
            y -= 22.f;
        }
        // last line
        glRasterPos3f( 10.f - xOffset, y, 0.99f );
        font->draw( message );
    }

    // Switch to a screen-space frustum for the help overlay.
    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    applyScreenFrustum();
    glMatrixMode( GL_MODELVIEW );

    if( frameData.showHelp( ))
    {
        const eq::util::BitmapFont* font = getWindow()->getSmallFont();
        std::string help = EqPly::getHelp();
        float y = height - 16.f;

        for( size_t pos = help.find( '\n' ); pos != std::string::npos;
             pos = help.find( '\n' ))
        {
            glRasterPos3f( 10.f, y, 0.99f );

            font->draw( help.substr( 0, pos ));
            help = help.substr( pos + 1 );
            y -= 16.f;
        }
        // last line
        glRasterPos3f( 10.f, y, 0.99f );
        font->draw( help );
    }

    resetAssemblyState();
}