QXipIvWidget::QXipIvWidget(QWidget* parent, QGLWidget* shareWidget) : QWidget(parent)
{
    // Reset every member to a known state before any setup code runs.
    m_cursorField = 0;
    m_sceneObject = 0;
    m_mouseDownPos = QPoint(-1, -1);
    m_width = 0;
    m_height = 0;
    m_root = 0;
    m_mouseMode = RESIZE_NONE;
    m_sceneManager = 0;
    m_isSceneManagerActive = false;

    setMouseTracking(true);
    setAutoFillBackground(false);

    // The scene manager drives rendering of the application's shared
    // Inventor scene graph into this widget.
    m_sceneManager = new SoSceneManager;
    m_sceneManager->setGLRenderAction(new SoGLRenderAction(SbViewportRegion(512, 512)));
    m_sceneManager->setRenderCallback(renderSceneCbFunc_, this);
    m_sceneManager->setSceneGraph(ViewerCore::get()->getRootNode());
    m_sceneManager->activate();
    m_isSceneManagerActive = true;

    // Embedded GL widget that actually displays the rendered frames.
    m_GLWidget = new QXipIvGLWidget(m_sceneManager, this, shareWidget);
    m_GLWidget->show();
    m_GLWidget->setMouseTracking(true);

    // Periodic idle processing every 40 ms (owned by this widget, so
    // Qt's parent/child mechanism cleans it up automatically).
    QTimer* idleTimer = new QTimer(this);
    connect(idleTimer, SIGNAL(timeout()), this, SLOT(doIdleProcessing()));
    idleTimer->start(40);
}
// Sensor callback: (re)renders the attached scene into the offscreen
// renderer and copies the resulting pixels into the public image field.
// If no scene is attached, the image is cleared to black instead.
void SceneTexture2::render_cb(void * closure, SoSensor * sensor)
{
  assert(closure);
  SceneTexture2 * me = (SceneTexture2 *) closure;

  // The requested texture size is stored as floats; truncate to shorts.
  SbVec2f tempsize = PUBLIC(me)->size.getValue();
  SbVec2s size;
  size[0] = (short) tempsize[0];
  size[1] = (short) tempsize[1];
  int nc = 3; // number of image components (RGB)

  SoNode * scene = PUBLIC(me)->scene.getValue();

  // Suppress notification while rewriting the image data so dependents
  // are only notified once, via the explicit touch() at the end.
  SbBool save = PUBLIC(me)->image.enableNotify(FALSE);
  if ( scene != NULL ) {
    if ( me->renderer == NULL ) {
      // Lazily create the offscreen renderer on the first render.
      me->renderer = new SoOffscreenRenderer(SbViewportRegion(size));
      me->renderer->setComponents(SoOffscreenRenderer::RGB);
      me->renderer->getGLRenderAction()->setTransparencyType(SoGLRenderAction::BLEND);
      me->prevsize = size;
      PUBLIC(me)->image.setValue(size, nc, NULL);
    }
    me->renderer->render(scene);
    unsigned char * renderbuffer = me->renderer->getBuffer();
    unsigned char * imagebytes = PUBLIC(me)->image.startEditing(size, nc);
    memcpy(imagebytes, renderbuffer, size[0] * size[1] * nc);
    PUBLIC(me)->image.finishEditing();
  }
  else {
    // No scene attached: clear the image to black.
    unsigned char * imagebytes = PUBLIC(me)->image.startEditing(size, nc);
    memset(imagebytes, 0, size[0] * size[1] * nc);
    PUBLIC(me)->image.finishEditing();
  }
  // Restore the previous notification state and fire one notification
  // if notification was enabled when we entered.
  PUBLIC(me)->image.enableNotify(save);
  if ( save ) PUBLIC(me)->image.touch();
}
// Reconfigure on changes to window dimensions. void reshape_cb(int w, int h) { int idx = winid2idx(glutGetWindow()); scenemanager[idx]->setWindowSize(SbVec2s(w, h)); scenemanager[idx]->setSize(SbVec2s(w, h)); scenemanager[idx]->setViewportRegion(SbViewportRegion(w, h)); scenemanager[idx]->scheduleRedraw(); }
/*!
  Default constructor.

  Note: passes a default SbViewportRegion to the parent constructor.
*/
SoLineHighlightRenderAction::SoLineHighlightRenderAction(void) : inherited(SbViewportRegion())
{
  PRIVATE(this)->owner = this;
  // need to set hlVisible here, and not in the pimpl constructor, since
  // "owner" is not initialized when the pimpl constructor is called
  this->hlVisible = TRUE;
  // standard boilerplate: registers this action's type/method data
  SO_ACTION_CONSTRUCTOR(SoLineHighlightRenderAction);
}
void QXipIvWidget::resizeGL(int width, int height) { m_width = width; m_height = height; m_GLWidget->resize(m_width, m_height); m_sceneManager->setWindowSize(SbVec2s(m_width, m_height)); m_sceneManager->setSize(SbVec2s(m_width, m_height)); m_sceneManager->setViewportRegion(SbViewportRegion(m_width, m_height)); }
// used to render shape and non-shape nodes (usually SoGroup or SoSeparator). void SoBoxHighlightRenderActionP::drawHighlightBox(const SoPath * path) { if (this->camerasearch == NULL) { this->camerasearch = new SoSearchAction; } // find camera used to render node this->camerasearch->setFind(SoSearchAction::TYPE); this->camerasearch->setInterest(SoSearchAction::FIRST); // find first camera to break out asap this->camerasearch->setType(SoCamera::getClassTypeId()); this->camerasearch->apply(const_cast<SoPath*>(path)); if (this->camerasearch->getPath()) { this->bboxseparator->insertChild(this->camerasearch->getPath()->getTail(), 0); } this->camerasearch->reset(); if (this->bboxaction == NULL) { this->bboxaction = new SoGetBoundingBoxAction(SbViewportRegion(100, 100)); } this->bboxaction->setViewportRegion(PUBLIC(this)->getViewportRegion()); this->bboxaction->apply(const_cast<SoPath*>(path)); SbXfBox3f & box = this->bboxaction->getXfBoundingBox(); if (!box.isEmpty()) { // set cube size float x, y, z; box.getSize(x, y, z); this->bboxcube->width = x; this->bboxcube->height = y; this->bboxcube->depth = z; SbMatrix transform = box.getTransform(); // get center (in the local bbox coordinate system) SbVec3f center = box.SbBox3f::getCenter(); // if center != (0,0,0), move the cube if (center != SbVec3f(0.0f, 0.0f, 0.0f)) { SbMatrix t; t.setTranslate(center); transform.multLeft(t); } this->bboxtransform->matrix = transform; PUBLIC(this)->SoGLRenderAction::apply(this->bboxseparator); } // remove camera this->bboxseparator->removeChild(0); }
void QCtkXipSGWidget::resizeGL(int width, int height) { mWidth = width; mHeight = height; glViewport(0,0,width, height); if(mSceneManager) { mSceneManager->setWindowSize(SbVec2s(mWidth, mHeight)); mSceneManager->setSize(SbVec2s(mWidth, mHeight)); mSceneManager->setViewportRegion(SbViewportRegion(mWidth, mHeight)); } }
void SceneTexture2::size_updated_cb(void * closure, SoSensor * sensor) { assert(closure); SceneTexture2 * me = (SceneTexture2 *) closure; SbVec2f tempsize = PUBLIC(me)->size.getValue(); SbVec2s size; size[0] = (short) tempsize[0]; size[1] = (short) tempsize[1]; if ( size != me->prevsize ) { if ( me->renderer != NULL ) { me->renderer->setViewportRegion(SbViewportRegion(size)); PUBLIC(me)->image.setValue(size, 3, NULL); me->render_sensor->schedule(); } me->prevsize = size; } }
// Builds the demo scene graph: renders a red cone (read from an iv
// buffer) into a generated texture map, then returns a scene containing
// a cube with that texture applied.
//
// Returns a new, unreferenced SoSeparator; the caller is expected to
// ref() it (standard Inventor idiom). Exits the process if the iv
// buffer cannot be parsed.
SoSeparator * createScenegraph(void)
{
  // Temporary root for the scene that gets baked into the texture.
  SoSeparator * texroot = new SoSeparator;
  texroot->ref();

  SoInput in;
  in.setBuffer(red_cone_iv, strlen(red_cone_iv));
  SoSeparator * result = SoDB::readAll(&in);
  if (result == NULL) {
    texroot->unref(); // release before bailing out
    exit(1);
  }

  SoPerspectiveCamera *myCamera = new SoPerspectiveCamera;
  SoRotationXYZ *rot = new SoRotationXYZ;
  rot->axis = SoRotationXYZ::X;
  rot->angle = M_PI_2;
  myCamera->position.setValue(SbVec3f(-0.2, -0.2, 2.0));
  myCamera->scaleHeight(0.4);
  texroot->addChild(myCamera);
  texroot->addChild(new SoDirectionalLight);
  texroot->addChild(rot);
  texroot->addChild(result);
  myCamera->viewAll(texroot, SbViewportRegion());

  // Generate the texture map
  SoTexture2 *texture = new SoTexture2;
  texture->ref(); // keep alive while texroot is released below
  if (generateTextureMap(texroot, texture, 128, 128))
    printf ("Successfully generated texture map\n");
  else
    printf ("Could not generate texture map\n");
  texroot->unref();

  // Make a scene with a cube and apply the texture to it
  SoSeparator * root = new SoSeparator;
  root->addChild(texture);
  // root now holds a reference to the texture; drop our manual one so
  // the node is not leaked (the original code never balanced the ref()).
  texture->unref();
  root->addChild(new SoCube);
  return root;
}
// Constructor: connects to the Oculus Rift, configures tracking and
// distortion rendering, and builds one Coin scene graph (camera +
// lights + shared scene placeholder) per eye.
//
// NOTE: uses a bare `throw;` on fatal init errors; with no exception in
// flight this calls std::terminate(), i.e. it deliberately aborts.
CoinRiftWidget::CoinRiftWidget() : QGLWidget()
{
    for (int eye = 0; eye < 2; eye++) {
        reinterpret_cast<ovrGLTextureData*>(&eyeTexture[eye])->TexId = 0;
#ifdef USE_FRAMEBUFFER
        frameBufferID[eye] = 0;
        depthBufferID[eye] = 0;
#endif
    }

    // OVR will do the swapping.
    setAutoBufferSwap(false);

    hmd = ovrHmd_Create(0);
    if (!hmd) {
        qDebug() << "Could not find Rift device.";
        throw;
    }

    if (!ovrHmd_ConfigureTracking (hmd,
                                   ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, // Capabilities we support.
                                   ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position
                                   )) { // Capabilities we require.
        qDebug() << "Could not start Rift motion sensor.";
        throw;
    }

    resize(hmd->Resolution.w, hmd->Resolution.h);

    // Configure stereo settings.
    ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left, hmd->DefaultEyeFov[0], 1.0f);
    ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right, hmd->DefaultEyeFov[1], 1.0f);

    // The shared render target must be large enough for either eye, so
    // take the max of the two widths and the max of the two heights.
    // (The original code compared each value with itself.)
#ifdef USE_SO_OFFSCREEN_RENDERER
    renderer = new SoOffscreenRenderer(SbViewportRegion(std::max(recommenedTex0Size.w, recommenedTex1Size.w),
                                                        std::max(recommenedTex0Size.h, recommenedTex1Size.h)));
    renderer->setComponents(SoOffscreenRenderer::RGB_TRANSPARENCY);
    BackgroundColor = SbColor(.0f, .0f, .8f);
    renderer->setBackgroundColor(BackgroundColor);
#endif
#ifdef USE_FRAMEBUFFER
    m_sceneManager = new SoSceneManager();
    m_sceneManager->setViewportRegion(SbViewportRegion(std::max(recommenedTex0Size.w, recommenedTex1Size.w),
                                                       std::max(recommenedTex0Size.h, recommenedTex1Size.h)));
    m_sceneManager->setBackgroundColor(SbColor(.0f, .0f, .8f));
#endif

    basePosition = SbVec3f(0.0f, 0.0f, -2.0f);

    // light handling
    SoDirectionalLight *light = new SoDirectionalLight();
    light->direction.setValue(1,-1,-1);
    SoDirectionalLight *light2 = new SoDirectionalLight();
    light2->direction.setValue(-1,-1,-1);
    light2->intensity.setValue(0.6);
    light2->color.setValue(0.8,0.8,1);

    scene = new SoSeparator(0); // Placeholder.

    // Per-eye scene graph: frustum camera + both lights + shared scene.
    for (int eye = 0; eye < 2; eye++) {
        rootScene[eye] = new SoSeparator();
        rootScene[eye]->ref();
        camera[eye] = new SoFrustumCamera();
        camera[eye]->position.setValue(basePosition);
        camera[eye]->focalDistance.setValue(5.0f);
        camera[eye]->viewportMapping.setValue(SoCamera::LEAVE_ALONE);
        rootScene[eye]->addChild(camera[eye]);
        rootScene[eye]->addChild(light);
        rootScene[eye]->addChild(light2);
        rootScene[eye]->addChild(scene);
    }

    // Populate ovrEyeDesc[2].
    eyeRenderDesc[0].Eye = ovrEye_Left;
    eyeRenderDesc[1].Eye = ovrEye_Right;
    eyeRenderDesc[0].Fov = hmd->DefaultEyeFov[0];
    eyeRenderDesc[1].Fov = hmd->DefaultEyeFov[1];
#ifdef USE_SO_OFFSCREEN_RENDERER
    eyeTexture[0].Header.TextureSize.w = renderer->getViewportRegion().getViewportSizePixels().getValue()[0];
    eyeTexture[0].Header.TextureSize.h = renderer->getViewportRegion().getViewportSizePixels().getValue()[1];
    eyeTexture[1].Header.TextureSize = eyeTexture[0].Header.TextureSize;
#endif
#ifdef USE_FRAMEBUFFER
    eyeTexture[0].Header.TextureSize = recommenedTex0Size;
    eyeTexture[1].Header.TextureSize = recommenedTex1Size;
#endif
    eyeTexture[0].Header.RenderViewport.Pos.x = 0;
    eyeTexture[0].Header.RenderViewport.Pos.y = 0;
    eyeTexture[0].Header.RenderViewport.Size = eyeTexture[0].Header.TextureSize;
    eyeTexture[1].Header.RenderViewport.Pos = eyeTexture[0].Header.RenderViewport.Pos;
    eyeTexture[1].Header.RenderViewport.Size = eyeTexture[1].Header.TextureSize;

    const int backBufferMultisample = 0; // TODO This is a guess?

    ovrGLConfig cfg;
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = hmd->Resolution;
    cfg.OGL.Header.Multisample = backBufferMultisample;
    cfg.OGL.Window = reinterpret_cast<HWND>(winId());
    makeCurrent();
    //cfg.OGL.WglContext = wglGetCurrentContext(); // http://stackoverflow.com/questions/17532033/qglwidget-get-gl-contextes-for-windows
    cfg.OGL.DC = wglGetCurrentDC();
    qDebug() << "Window:" << cfg.OGL.Window;
    //qDebug() << "Context:" << cfg.OGL.WglContext;
    qDebug() << "DC:" << cfg.OGL.DC;

    int DistortionCaps = 0;
    DistortionCaps |= ovrDistortionCap_Chromatic;
    // DistortionCaps |= ovrDistortionCap_TimeWarp; // Produces black screen...
    DistortionCaps |= ovrDistortionCap_Vignette;
    DistortionCaps |= ovrDistortionCap_HqDistortion;

    bool VSyncEnabled(false); // TODO This is a guess.
    if (!ovrHmd_ConfigureRendering( hmd,
                                    &cfg.Config,
                                    /*(VSyncEnabled ? 0 : ovrHmdCap_NoVSync),*/
                                    DistortionCaps,
                                    hmd->DefaultEyeFov,//eyes,
                                    eyeRenderDesc)) {
        qDebug() << "Could not configure OVR rendering.";
        throw;
    }

    // Derive the per-eye frustum from the FOV tangents at the near plane.
    static const float nearPlane = 0.01;
    for (int eye = 0; eye < 2; eye++) {
        camera[eye]->aspectRatio.setValue((eyeRenderDesc[eye].Fov.LeftTan + eyeRenderDesc[eye].Fov.RightTan) /
                                          (eyeRenderDesc[eye].Fov.UpTan + eyeRenderDesc[eye].Fov.DownTan));
        camera[eye]->nearDistance.setValue(nearPlane);
        camera[eye]->farDistance.setValue(10000.0f);
        camera[eye]->left.setValue(-eyeRenderDesc[eye].Fov.LeftTan * nearPlane);
        camera[eye]->right.setValue(eyeRenderDesc[eye].Fov.RightTan * nearPlane);
        camera[eye]->top.setValue(eyeRenderDesc[eye].Fov.UpTan * nearPlane);
        camera[eye]->bottom.setValue(-eyeRenderDesc[eye].Fov.DownTan * nearPlane);
    }
}
/* * Fun flux analysis */ void FluxAnalysis::RunFluxAnalysis( QString nodeURL, QString surfaceSide, unsigned long nOfRays, bool increasePhotonMap, int heightDivisions, int widthDivisions ) { m_surfaceURL = nodeURL; m_surfaceSide = surfaceSide; //Delete a photonCounts if( m_photonCounts && m_photonCounts != 0 ) { for( int h = 0; h < m_heightDivisions; h++ ) { delete[] m_photonCounts[h]; } delete[] m_photonCounts; } m_photonCounts = 0; m_heightDivisions = heightDivisions; m_widthDivisions = widthDivisions; //Check if there is a scene if ( !m_pCurrentScene ) return; //Check if there is a transmissivity defined TTransmissivity* transmissivity = 0; if ( !m_pCurrentScene->getPart( "transmissivity", false ) ) transmissivity = 0; else transmissivity = static_cast< TTransmissivity* > ( m_pCurrentScene->getPart( "transmissivity", false ) ); //Check if there is a rootSeparator InstanceNode if( !m_pRootSeparatorInstance ) return; InstanceNode* sceneInstance = m_pRootSeparatorInstance->GetParent(); if ( !sceneInstance ) return; //Check if there is a light and is properly configured if ( !m_pCurrentScene->getPart( "lightList[0]", false ) )return; TLightKit* lightKit = static_cast< TLightKit* >( m_pCurrentScene->getPart( "lightList[0]", false ) ); InstanceNode* lightInstance = sceneInstance->children[0]; if ( !lightInstance ) return; if( !lightKit->getPart( "tsunshape", false ) ) return; TSunShape* sunShape = static_cast< TSunShape * >( lightKit->getPart( "tsunshape", false ) ); if( !lightKit->getPart( "icon", false ) ) return; TLightShape* raycastingSurface = static_cast< TLightShape * >( lightKit->getPart( "icon", false ) ); if( !lightKit->getPart( "transform" ,false ) ) return; SoTransform* lightTransform = static_cast< SoTransform * >( lightKit->getPart( "transform" ,false ) ); //Check if there is a random generator is defined. 
if( !m_pRandomDeviate || m_pRandomDeviate== 0 ) return; //Check if the surface and the surface side defined is suitable if( CheckSurface() == false || CheckSurfaceSide() == false ) return; //Create the photon map where photons are going to be stored if( !m_pPhotonMap || !increasePhotonMap ) { if( m_pPhotonMap ) m_pPhotonMap->EndStore( -1 ); delete m_pPhotonMap; m_pPhotonMap = new TPhotonMap(); m_pPhotonMap->SetBufferSize( HUGE_VAL ); m_tracedRays = 0; m_wPhoton = 0; m_totalPower = 0; } QVector< InstanceNode* > exportSuraceList; QModelIndex nodeIndex = m_pCurrentSceneModel->IndexFromNodeUrl( m_surfaceURL ); if( !nodeIndex.isValid() ) return; InstanceNode* surfaceNode = m_pCurrentSceneModel->NodeFromIndex( nodeIndex ); if( !surfaceNode || surfaceNode == 0 ) return; exportSuraceList.push_back( surfaceNode ); //UpdateLightSize(); TSeparatorKit* concentratorRoot = static_cast< TSeparatorKit* >( m_pCurrentScene->getPart( "childList[0]", false ) ); if ( !concentratorRoot ) return; SoGetBoundingBoxAction* bbAction = new SoGetBoundingBoxAction( SbViewportRegion() ) ; concentratorRoot->getBoundingBox( bbAction ); SbBox3f box = bbAction->getXfBoundingBox().project(); delete bbAction; bbAction = 0; BBox sceneBox; if( !box.isEmpty() ) { sceneBox.pMin = Point3D( box.getMin()[0], box.getMin()[1], box.getMin()[2] ); sceneBox.pMax = Point3D( box.getMax()[0], box.getMax()[1], box.getMax()[2] ); if( lightKit ) lightKit->Update( sceneBox ); } m_pCurrentSceneModel->UpdateSceneModel(); //Compute bounding boxes and world to object transforms trf::ComputeSceneTreeMap( m_pRootSeparatorInstance, Transform( new Matrix4x4 ), true ); m_pPhotonMap->SetConcentratorToWorld( m_pRootSeparatorInstance->GetIntersectionTransform() ); QStringList disabledNodes = QString( lightKit->disabledNodes.getValue().getString() ).split( ";", QString::SkipEmptyParts ); QVector< QPair< TShapeKit*, Transform > > surfacesList; trf::ComputeFistStageSurfaceList( m_pRootSeparatorInstance, disabledNodes, &surfacesList ); 
lightKit->ComputeLightSourceArea( m_sunWidthDivisions, m_sunHeightDivisions, surfacesList ); if( surfacesList.count() < 1 ) return; QVector< long > raysPerThread; int maximumValueProgressScale = 100; unsigned long t1 = nOfRays/ maximumValueProgressScale; for( int progressCount = 0; progressCount < maximumValueProgressScale; ++ progressCount ) raysPerThread<< t1; if( ( t1 * maximumValueProgressScale ) < nOfRays ) raysPerThread<< ( nOfRays - ( t1* maximumValueProgressScale) ); Transform lightToWorld = tgf::TransformFromSoTransform( lightTransform ); lightInstance->SetIntersectionTransform( lightToWorld.GetInverse() ); // Create a progress dialog. QProgressDialog dialog; dialog.setLabelText( QString("Progressing using %1 thread(s)..." ).arg( QThread::idealThreadCount() ) ); // Create a QFutureWatcher and conncect signals and slots. QFutureWatcher< void > futureWatcher; QObject::connect(&futureWatcher, SIGNAL(finished()), &dialog, SLOT(reset())); QObject::connect(&dialog, SIGNAL(canceled()), &futureWatcher, SLOT(cancel())); QObject::connect(&futureWatcher, SIGNAL(progressRangeChanged(int, int)), &dialog, SLOT(setRange(int, int))); QObject::connect(&futureWatcher, SIGNAL(progressValueChanged(int)), &dialog, SLOT(setValue(int))); QMutex mutex; QMutex mutexPhotonMap; QFuture< void > photonMap; if( transmissivity ) photonMap = QtConcurrent::map( raysPerThread, RayTracer( m_pRootSeparatorInstance, lightInstance, raycastingSurface, sunShape, lightToWorld, transmissivity, *m_pRandomDeviate, &mutex, m_pPhotonMap, &mutexPhotonMap, exportSuraceList ) ); else photonMap = QtConcurrent::map( raysPerThread, RayTracerNoTr( m_pRootSeparatorInstance, lightInstance, raycastingSurface, sunShape, lightToWorld, *m_pRandomDeviate, &mutex, m_pPhotonMap, &mutexPhotonMap, exportSuraceList ) ); futureWatcher.setFuture( photonMap ); // Display the dialog and start the event loop. 
dialog.exec(); futureWatcher.waitForFinished(); m_tracedRays += nOfRays; double irradiance = sunShape->GetIrradiance(); double inputAperture = raycastingSurface->GetValidArea(); m_wPhoton = double ( inputAperture * irradiance ) / m_tracedRays; UpdatePhotonCounts(); }
// Constructor forwarding an explicit GL context to the QuarterWidget
// base class. The matrix action is initialized with a dummy 100x100
// viewport (presumably updated to the real viewport later; TODO confirm
// in init()/resize handling). All real setup happens in init().
SIM::Coin3D::Quarter::SoQTQuarterAdaptor::SoQTQuarterAdaptor(QGLContext* context, QWidget* parent, const QGLWidget* sharewidget, Qt::WindowFlags f)
    : QuarterWidget(context, parent, sharewidget, f), matrixaction(SbViewportRegion(100,100))
{
    init();
}
// (Re)creates the offscreen GL context / pbuffer and the cube-map GL
// image used to render the scene into a cube-map texture. Destroys and
// rebuilds the context when the requested size changes, renders the six
// cube faces (via glReadPixels fallback when render-to-texture is not
// supported), and marks the image/pbuffer as valid on exit.
void SoSceneTextureCubeMapP::updatePBuffer(SoState * state, const float quality)
{
  SbVec2s size = PUBLIC(this)->size.getValue();

  assert(PUBLIC(this)->scene.getValue());

  // Tear down the old context/image when the size changed or rendering
  // was disabled with a (0,0) size.
  if ((this->glcontext && this->glcontextsize != size) || (size == SbVec2s(0,0))) {
    if (this->glimage) {
      this->glimage->unref(state);
      this->glimage = NULL;
    }
    if (this->glcontext) {
      cc_glglue_context_destruct(this->glcontext);
      this->glcontextsize.setValue(-1,-1);
      this->glcontext = NULL;
    }
    if (this->glaction) {
      delete this->glaction;
      this->glaction = NULL;
    }
    this->glimagevalid = FALSE;
  }
  if (size == SbVec2s(0,0)) return;

  // FIXME: temporary until non power of two textures are supported,
  // pederb 2003-12-05
  size[0] = (short) coin_geq_power_of_two(size[0]);
  size[1] = (short) coin_geq_power_of_two(size[1]);

  if (this->glcontext == NULL) {
    this->glcontextsize = size;
    // disabled until an pbuffer extension is available to create a
    // render-to-texture pbuffer that has a non power of two size.
    // pederb, 2003-12-05
    if (1) { // if (!glue->has_ext_texture_rectangle) {
      this->glcontextsize[0] = (short) coin_geq_power_of_two(size[0]);
      this->glcontextsize[1] = (short) coin_geq_power_of_two(size[1]);

      if (this->glcontextsize != size) {
        static int didwarn = 0;
        if (!didwarn) {
          SoDebugError::postWarning("SoSceneTextureCubeMapP::updatePBuffer",
                                    "Requested non power of two size, "
                                    "but your OpenGL driver lacks support "
                                    "for such pbuffer textures.");
          didwarn = 1;
        }
      }
    }
    this->glrectangle = FALSE;
    if (!coin_is_power_of_two(this->glcontextsize[0]) ||
        !coin_is_power_of_two(this->glcontextsize[1])) {
      // we only get here if the OpenGL driver can handle non power of
      // two textures/pbuffers.
      this->glrectangle = TRUE;
    }

    // FIXME: make it possible to specify what kind of context you want
    // (RGB or RGBA, I guess). pederb, 2003-11-27
    unsigned int x = this->glcontextsize[0];
    unsigned int y = this->glcontextsize[1];

    this->glcontext = cc_glglue_context_create_offscreen(x, y);
    this->canrendertotexture = cc_glglue_context_can_render_to_texture(this->glcontext);

    if (!this->glaction) {
      // a fresh cache context id, so GL resources are not shared with
      // other contexts
      this->contextid = (int)SoGLCacheContextElement::getUniqueCacheContext();
      this->glaction = new SoGLRenderAction(SbViewportRegion(this->glcontextsize));
      this->glaction->
        addPreRenderCallback(SoSceneTextureCubeMapP::prerendercb,
                             (void*) PUBLIC(this));
    }
    else {
      this->glaction->
        setViewportRegion(SbViewportRegion(this->glcontextsize));
    }
    this->glaction->setCacheContext(this->contextid);
    this->glimagevalid = FALSE;
  }

  if (!this->pbuffervalid) {
    assert(this->glaction != NULL);
    assert(this->glcontext != NULL);
    this->glaction->setTransparencyType((SoGLRenderAction::TransparencyType)
                                        SoShapeStyleElement::getTransparencyType(state));

    cc_glglue_context_make_current(this->glcontext);
    glEnable(GL_DEPTH_TEST);

    if (!this->canrendertotexture) {
      // render-to-texture unsupported: render each of the 6 cube faces
      // into the pbuffer and read the pixels back into a CPU-side buffer
      SbVec2s size = this->glcontextsize;
      int cubeSideSize = size[0]*size[1]*4;
      int reqbytes = cubeSideSize*6; // 6 cube sides
      if (reqbytes > this->offscreenbuffersize) {
        delete[] this->offscreenbuffer;
        this->offscreenbuffer = new unsigned char[reqbytes];
        this->offscreenbuffersize = reqbytes;
      }

      unsigned char * cubeSidePtr = this->offscreenbuffer;

      for (int i=0; i<6; i++) {
        // updateCamera points the camera at cube face i before rendering
        this->glaction->apply(this->updateCamera((SoGLCubeMapImage::Target)i));
        glFlush();

        // tight packing for the readback, restore default (4) afterwards
        glPixelStorei(GL_PACK_ALIGNMENT, 1);
        glReadPixels(0,0,size[0],size[1],GL_RGBA,GL_UNSIGNED_BYTE,cubeSidePtr);
        glPixelStorei(GL_PACK_ALIGNMENT, 4);

        cubeSidePtr += cubeSideSize;
      }
    }
    cc_glglue_context_reinstate_previous(this->glcontext);
  }

  if (!this->glimagevalid || (this->glimage == NULL)) {
    // just delete old glimage
    if (this->glimage) {
      this->glimage->unref(state);
      this->glimage = NULL;
    }
    this->glimage = new SoGLCubeMapImage;
    uint32_t flags = this->glimage->getFlags();
    if (this->glrectangle) {
      flags |= SoGLImage::RECTANGLE;
    }
    // map the node's transparency function onto the GL image flags
    switch ((SoSceneTextureCubeMap::TransparencyFunction)
            (PUBLIC(this)->transparencyFunction.getValue())) {
    case SoSceneTextureCubeMap::NONE:
      flags |= SoGLImage::FORCE_TRANSPARENCY_FALSE|SoGLImage::FORCE_ALPHA_TEST_FALSE;
      break;
    case SoSceneTextureCubeMap::ALPHA_TEST:
      flags |= SoGLImage::FORCE_TRANSPARENCY_TRUE|SoGLImage::FORCE_ALPHA_TEST_TRUE;
      break;
    case SoSceneTextureCubeMap::ALPHA_BLEND:
      flags |= SoGLImage::FORCE_TRANSPARENCY_TRUE|SoGLImage::FORCE_ALPHA_TEST_FALSE;
      break;
    default:
      assert(0 && "should not get here");
      break;
    }
    this->glimage->setFlags(flags);

    if (this->canrendertotexture) {
      // FIXME: not implemented yet - 20050427 martin
      // bind texture to pbuffer
      this->glimage->setPBuffer(state, this->glcontext,
                                translateWrap((SoSceneTextureCubeMap::Wrap)PUBLIC(this)->wrapS.getValue()),
                                translateWrap((SoSceneTextureCubeMap::Wrap)PUBLIC(this)->wrapT.getValue()),
                                quality);
    }
  }
  if (!this->canrendertotexture && !this->pbuffervalid) {
    assert(this->glimage);
    assert(this->offscreenbuffer);
    // upload the six CPU-side face buffers into the cube-map image
    int cubeSideSize = this->glcontextsize[0] * this->glcontextsize[1] * 4;
    unsigned char * cubeSidePtr = this->offscreenbuffer;

    // FIXME: what about wrapS, wrapT, wrapR, and quality? - martin 20050427
    for (int i=0; i<6; i++) {
      this->glimage->setCubeMapImage((SoGLCubeMapImage::Target)i, cubeSidePtr,
                                     this->glcontextsize, 4);
      cubeSidePtr += cubeSideSize;
    }
  }
  this->glimagevalid = TRUE;
  this->pbuffervalid = TRUE;
}
/*!
  Default constructor.

  Note: passes a default SbViewportRegion to the parent constructor;
  all remaining setup is delegated to the private init() helper.
*/
SoBoxHighlightRenderAction::SoBoxHighlightRenderAction(void) : inherited(SbViewportRegion())
{
  this->init();
}
int main(int argc, char ** argv) { if ( argc != 3 ) { fprintf(stderr, "Usage: %s <infile.iv> <outfile.iv>\n", argv[0]); return -1; } SoDB::init(); SoNodeKit::init(); SoInteraction::init(); SoGenerateSceneGraphAction::initClass(); SoTweakAction::initClass(); SoInput in; SoNode * scene, * graph; if ( !in.openFile(argv[1]) ) { fprintf(stderr, "%s: error opening \"%s\" for reading.\n", argv[0], argv[1]); return -1; } scene = SoDB::readAll(&in); if ( scene == NULL ) { fprintf(stderr, "%s: error parsing \"%s\"\n", argv[0], argv[1]); return -1; } scene->ref(); SoGenerateSceneGraphAction action; // action.setDropTypeIfNameEnabled(TRUE); action.apply(scene); graph = action.getGraph(); if ( graph == NULL ) { fprintf(stderr, "%s: error generating scene graph\n", argv[0]); return -1; } graph->ref(); scene->unref(); scene = NULL; // figure out camera settings and needed rendering canvas size SoGetBoundingBoxAction bbaction(SbViewportRegion(64,64)); // just something bbaction.apply(graph); SbBox3f bbox = bbaction.getBoundingBox(); SbVec3f min = bbox.getMin(); SbVec3f max = bbox.getMax(); float bwidth = max[0] - min[0]; float bheight = max[1] - min[1]; // fprintf(stdout, "min: %g %g %g\n", min[0], min[1], min[2]); // fprintf(stdout, "max: %g %g %g\n", max[0], max[1], max[2]); // place camera SoSearchAction search; search.setType(SoCamera::getClassTypeId()); search.setInterest(SoSearchAction::FIRST); search.apply(graph); SoPath * campath = search.getPath(); SoOrthographicCamera * cam = (SoOrthographicCamera *) campath->getTail(); assert(cam != NULL); SbVec3f pos = cam->position.getValue(); cam->position.setValue(SbVec3f(min[0] + ((max[0]-min[0])/2.0), min[1] + ((max[1]-min[1])/2.0), pos[2])); cam->height.setValue(bheight); if ( TRUE ) { // FIXME: only write .iv-scene if asked SoOutput out; if ( !out.openFile(argv[2]) ) { fprintf(stderr, "%s: error opening \"%s\" for writing.\n", argv[0], argv[2]); return -1; } SoWriteAction writer(&out); // writer.setCoinFormattingEnabled(TRUE); 
writer.apply(graph); } int width = (int) ceil(bwidth * 150.0) + 2; int height = (int) ceil(bheight * 150.0); fprintf(stderr, "image: %d x %d\n", width, height); if ( TRUE ) { // FIXME: only write image if asked SoOffscreenRenderer renderer(SbViewportRegion(width, height)); SoGLRenderAction * glra = renderer.getGLRenderAction(); glra->setNumPasses(9); // FIXME: auto-crop image afterwards? seems like it's a perfect fit right now renderer.setComponents(SoOffscreenRenderer::RGB_TRANSPARENCY); renderer.setBackgroundColor(SbColor(1.0,1.0,1.0)); renderer.render(graph); // FIXME: support command line option filename // FIXME: also support .eps renderer.writeToFile("output.png", "png"); } graph->unref(); return 0; }