// Draw the octree visualisation for this node.
//
// FIX: the original implementation overwrote the supplied `view` with
// M3dView::active3dView(), which drew into the active panel rather than
// the panel Maya is actually refreshing (wrong viewport when several
// model panels are open). We now use the view Maya hands us.
void octreeVizNode::draw( M3dView & view, const MDagPath & /*path*/,
                          M3dView::DisplayStyle style, M3dView::DisplayStatus status )
{
	MDagPath camera;
	view.getCamera( camera );

	// Extract the camera parameters needed for the texture projection.
	MATRIX44F mat;
	double clipNear, clipFar, fov;
	int ispersp;
	parseCamera( camera, mat, clipNear, clipFar, fov, ispersp );

	view.beginGL();
	glPushAttrib( GL_ALL_ATTRIB_BITS );
	glShadeModel( GL_SMOOTH );
	glPointSize( 3 );

	if( m_pTex )
	{
		XYZ ori( 0, 0, 0 );
		m_pTex->setProjection( mat, fov, ispersp );

		// Give the texture the viewport width so raster tests can scale.
		int port[4];
		glGetIntegerv( GL_VIEWPORT, port );
		m_pTex->setPortWidth( port[2] );

		m_pTex->drawCube();
		m_pTex->testRaster( ori );
	}

	glPopAttrib();
	view.endGL();
}
// Load the textures, update the necessary variable values, initialize register combiners, // save and load the matrices with the proper values // MStatus hwRefractReflectShader_NV20::preDraw(const MDrawRequest& request, M3dView& view) { MStatus stat = loadTextures( request, view); if( MS::kSuccess != stat ) return stat; // get the reflectivity value // MPlug tPlug(thisMObject(), reflectivity); if( tPlug.getValue( fReflectivity ) ) { if( fReflectivity < 0.01f ) fReflectivity = 0.01f; if( fReflectivity > 1.0f ) fReflectivity = 1.0f; } else fReflectivity = 0.5f; // get the refraction index value // MPlug rPlug(thisMObject(), refractionIndex); if( rPlug.getValue( fRefractionIndex ) ) { if ( fRefractionIndex < 1.0f ) fRefractionIndex = 1.0f; if ( fRefractionIndex > 2.0f ) fRefractionIndex = 2.0f; } else fRefractionIndex = 1.0f; initCombiners( request, view ); // Compute the camera rotation angle and axis // MDagPath cameraPath; MStatus status = view.getCamera( cameraPath ); MMatrix mmatrix = cameraPath.inclusiveMatrix( &status ); MTransformationMatrix tmatrix( mmatrix ); MQuaternion camRotation = tmatrix.rotation(); MVector camAxis; double camTheta; camRotation.getAxisAngle( camAxis, camTheta ); // Convert to degrees from radians camTheta *= 57.295779513082320876798154814105; // == (180 / M_PI) view.beginGL(); glMatrixMode( GL_TEXTURE ); glPushMatrix(); glLoadIdentity(); glScalef(1.0, -1.0, 1.0); glRotated( camTheta, camAxis[0], camAxis[1], camAxis[2]); glMatrixMode( GL_MODELVIEW ); view.endGL(); return stat; }
// Render the meshes visible in the panel named `panelName`: gathers them
// with a frustum-filtered surface draw traversal and hands each mesh's
// triangle list, points and MVP matrix to the UserSceneRenderer.
//
// Fixes: unsigned loop counter to match numberOfItems() (was a
// signed/unsigned comparison), removed unused locals (MFnCamera,
// MTransformationMatrix) and dead commented-out code.
MStatus PluginTestUserOperation::execute( const MHWRender::MDrawContext &drawContext )
{
	M3dView view;
	if( M3dView::getM3dViewFromModelPanel( panelName, view ) != MStatus::kSuccess )
	{
		return MStatus::kSuccess; // panel not found - nothing to draw
	}

	// Get the current render-target size so the frustum matches the viewport.
	int targetW, targetH;
	drawContext.getRenderTargetSize( targetW, targetH );

	// Build the combined view-projection matrix from the panel's matrices.
	MDagPath cameraPath;
	view.getCamera( cameraPath );

	MMatrix m3dViewProjection, m3dViewModelView;
	view.projectionMatrix( m3dViewProjection );
	view.modelViewMatrix( m3dViewModelView );

	MFloatMatrix m3dFloatViewProjection( m3dViewProjection.matrix );
	MFloatMatrix m3dFloatViewModelView( m3dViewModelView.matrix );
	MFloatMatrix viewProjection = m3dFloatViewModelView * m3dFloatViewProjection;

	// Collect the drawable surfaces inside the camera frustum.
	SurfaceDrawTraversal traversal;
	traversal.enableFiltering( true );
	traversal.setFrustum( cameraPath, targetW, targetH );
	traversal.traverse();

	unsigned int numItems = traversal.numberOfItems();
	MFnMesh fnMesh;
	for( unsigned int i = 0; i < numItems; i++ )
	{
		MDagPath path;
		traversal.itemPath( i, path );
		if( !path.hasFn( MFn::kMesh ) )
		{
			continue;
		}

		fnMesh.setObject( path );

		// Per-object model-view-projection, transposed for the renderer.
		MFloatMatrix modelWorld( path.inclusiveMatrix().matrix );
		MFloatMatrix modelViewProjection = modelWorld * viewProjection;
		modelViewProjection = modelViewProjection.transpose();

		// Index list for all the triangles in the mesh in one big list,
		// i.e. first 3 are for tri 1 etc. Indexes into getPoints().
		MIntArray triangleCounts;
		MIntArray triangleVertices;
		fnMesh.getTriangles( triangleCounts, triangleVertices );

		MFloatPointArray vertexArray;
		fnMesh.getPoints( vertexArray );

		UserSceneRenderer::get()->render( triangleVertices, vertexArray, modelViewProjection );
	}

	return MStatus::kSuccess;
}
// Camera override const MHWRender::MCameraOverride * viewRenderUserOperation::cameraOverride() { if (fUserCameraOverride) { M3dView mView; if (mPanelName.length() && (M3dView::getM3dViewFromModelPanel(mPanelName, mView) == MStatus::kSuccess)) { mView.getCamera( mCameraOverride.mCameraPath ); return &mCameraOverride; } } return NULL; }
// Tool press handler. The base class deals with selection; once an
// object is selected we record the press position, classify the active
// viewport (TOP/FRONT/SIDE/PERSP) from its camera axes, and create a
// fresh move command to accumulate the drag translation.
MStatus moveContext::doPress( MEvent & event )
{
	MStatus stat = MPxSelectionContext::doPress( event );
	const MSpace::Space spc = MSpace::kWorld;

	// Still selecting - nothing more to set up.
	if ( isSelecting() )
		return stat;

	event.getPosition( startPos_x, startPos_y );
	view = M3dView::active3dView();

	MDagPath camera;
	stat = view.getCamera( camera );
	if ( stat != MS::kSuccess )
	{
		cerr << "Error: M3dView::getCamera" << endl;
		return stat;
	}

	MFnCamera fnCamera( camera );
	const MVector upDir = fnCamera.upDirection( spc );
	const MVector rightDir = fnCamera.rightDirection( spc );

	// Classify the view: orthographic cameras are told apart by their
	// up/right axes; anything else is a perspective view.
	if ( !fnCamera.isOrtho() )
	{
		currWin = PERSP;
	}
	else if ( upDir.isEquivalent( MVector::zNegAxis, kVectorEpsilon ) )
	{
		currWin = TOP;
	}
	else if ( rightDir.isEquivalent( MVector::xAxis, kVectorEpsilon ) )
	{
		currWin = FRONT;
	}
	else
	{
		currWin = SIDE;
	}

	// Create an instance of the move tool command.
	cmd = (moveCmd*)newToolCommand();
	cmd->setVector( 0.0, 0.0, 0.0 );

	return stat;
}
// Bind the viewer to the Maya model panel whose camera transform's
// partial path name matches strView. "floating" binds the viewer to a
// floating window; an empty or NULL name records an empty binding.
//
// Fixes: removed a redundant get3dView(0,...) call whose result was
// immediately overwritten by the loop, hoisted numberOf3dViews() out of
// the loop condition, removed the unused `renderwnd` local and the
// redundant NULL re-check after strView has been normalised to "".
MStatus CMayaManager::BindViewerToPanel( const char* strView )
{
	MDagPath MayaCamera;

	if( strView == NULL )
		strView = "";

	// Remember the binding so it can be re-established later.
	StringCchCopyA( m_ViewerBinding, MAX_PATH, strView );

	if( strView[0] != '\0' )
	{
		if( 0 == lstrcmpiA( strView, "floating" ) )
		{
			g_Viewer.BindToWindow( NULL, true );
		}
		else
		{
			// Search the 3d views for a camera named strView.
			M3dView ourView;
			const UINT viewCount = M3dView::numberOf3dViews();
			for( UINT iView = 0; iView < viewCount; iView++ )
			{
				M3dView::get3dView( iView, ourView );
				ourView.getCamera( MayaCamera );
				MayaCamera.pop(); // camera shape -> its transform

				if( MayaCamera.partialPathName() == MString( strView ) )
				{
					g_Viewer.BindToWindow( ourView.window(), true );
					break;
				}
			}
		}
	}

	return MS::kSuccess;
}
// Open an SDL/OpenGL viewport, render the Maya scene with the GL
// renderer, then push the resulting buffer into Maya's Render View.
//
// Fixes: the window size (640x480) was hard-coded in four places even
// though it is stored in windowWidth/windowHeight - the members are now
// the single source of truth; SDL window/context creation is checked so
// a failure no longer dereferences NULL.
MStatus RadiosityRenderer::doIt( const MArgList &args )
{
	this->args = args;

	// Viewport dimensions; prepareRenderView() uses the same members
	// when pushing pixels to the Render View.
	windowWidth = 640;
	windowHeight = 480;

	SDL_Window *window = SDL_CreateWindow(
		"Radiosity Renderer Viewport",
		0, 0,
		windowWidth, windowHeight,
		SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE );
	if( !window )
	{
		setResult( "RadiosityRenderer: SDL_CreateWindow failed." );
		return MS::kFailure;
	}

	SDL_GLContext glcontext = SDL_GL_CreateContext( window );
	if( !glcontext )
	{
		SDL_DestroyWindow( window );
		setResult( "RadiosityRenderer: SDL_GL_CreateContext failed." );
		return MS::kFailure;
	}

	glClearColor( 0, 0, 0, 1 );
	glClear( GL_COLOR_BUFFER_BIT );

	// Grab the camera of the active view.
	M3dView curView = M3dView::active3dView();
	MDagPath camDagPath;
	curView.getCamera( camDagPath );

	IterateThroughDag();

	// Pass DAG to renderer, let renderer render scene...
	SDL_GL_MakeCurrent( window, glcontext );
	GLRenderer renderer = GLRenderer( windowWidth, windowHeight );
	renderer.RenderToScreen();
	SDL_GL_SwapWindow( window );

	// Write pixels to render window...
	prepareRenderView();
	SDL_GL_MakeCurrent( window, glcontext );
	renderBufferToRenderView();

	sleep( 1 );

	SDL_GL_DeleteContext( glcontext );
	SDL_DestroyWindow( window );
	SDL_Quit();

	return MS::kSuccess;
}
// Prepare Maya's Render View for receiving the rendered buffer: verify
// the render editor exists (i.e. not a batch render), parse the optional
// flags, point the Render View at the active view's camera, and begin a
// windowWidth x windowHeight render.
//
// Fixes: "occured" typo made consistent with the sibling "occurred"
// message in this same function; debug printfs were missing newlines.
MStatus RadiosityRenderer::prepareRenderView()
{
	if (!MRenderView::doesRenderEditorExist())
	{
		setResult( "Cannot renderViewRender in batch render mode. "
				   "Please run in interactive mode, "
				   "so that the render editor exists." );
		return MS::kFailure;
	}
	else
	{
		printf("Past doesRenderEditorExist()\n");
	}

	// get optional flags
	MArgDatabase argData( syntax(), args );
	parseSyntax( argData );

	// Render from the camera of the currently active model view.
	M3dView curView = M3dView::active3dView();
	MDagPath camDagPath;
	curView.getCamera( camDagPath );
	printf("Rendering camera: %s\n", camDagPath.fullPathName().asChar());

	if( MRenderView::setCurrentCamera( camDagPath ) != MS::kSuccess )
	{
		setResult( "renderViewRender: error occurred in setCurrentCamera." );
		return MS::kFailure;
	}

	if (MRenderView::startRender( windowWidth, windowHeight, doNotClearBackground) != MS::kSuccess)
	{
		setResult( "renderViewRender: error occurred in startRender." );
		return MS::kFailure;
	}

	return MS::kSuccess;
}
// Snap the cursor to the nearest vertex of the mesh under it.
//
// Renders the scene through an IECoreGL::Selector in a tiny NDC window
// around the selection ray, finds the closest hit, resolves it back to a
// scene-interface path, and snaps to the nearest vertex of that mesh.
// Returns false when nothing suitable is under the cursor.
bool SceneShapeUI::snap( MSelectInfo &snapInfo ) const
{
	MStatus s;

	// Only snappable as a mesh unless we are highlighted for components.
	if( snapInfo.displayStatus() != M3dView::kHilite )
	{
		MSelectionMask meshMask( MSelectionMask::kSelectMeshes );
		if( !snapInfo.selectable( meshMask ) )
		{
			return false;
		}
	}

	// early out if we have no scene to draw
	SceneShape *sceneShape = static_cast<SceneShape *>( surfaceShape() );
	const IECore::SceneInterface *sceneInterface = sceneShape->getSceneInterface().get();
	if( !sceneInterface )
	{
		return false;
	}

	IECoreGL::ConstScenePtr scene = sceneShape->glScene();
	if( !scene )
	{
		return false;
	}

	// Get the viewport that the snapping operation is taking place in.
	M3dView view = snapInfo.view();

	// Use an IECoreGL::Selector to find the point in world space that we wish to snap to.
	// We do this by first getting the origin of the selection ray and transforming it into
	// NDC space using the OpenGL projection and transformation matrices. Once we have the
	// point in NDC we can use it to define the viewport that the IECoreGL::Selector will use.
	MPoint localRayOrigin;
	MVector localRayDirection;
	snapInfo.getLocalRay( localRayOrigin, localRayDirection );
	Imath::V3d org( localRayOrigin[0], localRayOrigin[1], localRayOrigin[2] );

	MDagPath camera;
	view.getCamera( camera );
	MMatrix localToCamera = snapInfo.selectPath().inclusiveMatrix() * camera.inclusiveMatrix().inverse();

	// beginSelect()/endSelect() is used only to obtain the projection
	// matrix Maya would select with - nothing is drawn in between.
	view.beginSelect();
	Imath::M44d projectionMatrix;
	glGetDoublev( GL_PROJECTION_MATRIX, projectionMatrix.getValue() );
	view.endSelect();

	// Project the ray origin into NDC ([0,1] range, Y flipped).
	double v[4][4];
	localToCamera.get( v );
	Imath::M44d cam( v );
	Imath::V3d ndcPt3d = ( (org * cam ) * projectionMatrix + Imath::V3d( 1. ) ) * Imath::V3d( .5 );
	Imath::V2d ndcPt( std::max( std::min( ndcPt3d[0], 1. ), 0. ), 1. - std::max( std::min( ndcPt3d[1], 1. ), 0. ) );

	view.beginGL();

	glMatrixMode( GL_PROJECTION );
	glLoadMatrixd( projectionMatrix.getValue() );

	float radius = .001; // The radius of the selection area in NDC.
	double aspect = double( view.portWidth() ) / view.portHeight();
	Imath::V2f selectionWH( radius, radius * aspect );

	// Render the scene through the selector, gathering hit records for a
	// small window centred on the cursor's NDC position.
	std::vector<IECoreGL::HitRecord> hits;
	{
		IECoreGL::Selector selector( Imath::Box2f( ndcPt - selectionWH, ndcPt + selectionWH ), IECoreGL::Selector::IDRender, hits );

		IECoreGL::State::bindBaseState();
		selector.baseState()->bind();
		scene->render( selector.baseState() );
	}

	view.endGL();

	if( hits.empty() )
	{
		return false;
	}

	// Get the closest mesh hit.
	float depthMin = std::numeric_limits<float>::max();
	int depthMinIndex = -1;
	for( unsigned int i=0, e = hits.size(); i < e; i++ )
	{
		if( hits[i].depthMin < depthMin )
		{
			depthMin = hits[i].depthMin;
			depthMinIndex = i;
		}
	}

	// Get the absolute path of the hit object.
	IECore::SceneInterface::Path objPath;
	std::string objPathStr;
	sceneInterface->path( objPath );
	IECore::SceneInterface::pathToString( objPath, objPathStr );
	objPathStr += IECoreGL::NameStateComponent::nameFromGLName( hits[depthMinIndex].name );
	IECore::SceneInterface::stringToPath( objPathStr, objPath );

	// Validate the hit selection.
	IECore::ConstSceneInterfacePtr childInterface;
	try
	{
		childInterface = sceneInterface->scene( objPath );
	}
	catch(...)
	{
		return false;
	}

	if( !childInterface )
	{
		return false;
	}

	if( !childInterface->hasObject() )
	{
		return false;
	}

	// Get the mesh primitive so that we can query it's vertices.
	double time = sceneShape->time();
	IECore::ConstObjectPtr object = childInterface->readObject( time );
	IECore::ConstMeshPrimitivePtr meshPtr = IECore::runTimeCast<const IECore::MeshPrimitive>( object.get() );
	if ( !meshPtr )
	{
		return false;
	}

	// Calculate the snap point in object space.
	MPoint worldIntersectionPoint;
	selectionRayToWorldSpacePoint( camera, snapInfo, depthMin, worldIntersectionPoint );
	Imath::V3f pt( worldIntersectionPoint[0], worldIntersectionPoint[1], worldIntersectionPoint[2] );
	Imath::M44f objToWorld( worldTransform( childInterface.get(), time ) );
	pt = pt * objToWorld.inverse();

	// Get the list of vertices in the mesh.
	IECore::V3fVectorData::ConstPtr pointData( meshPtr->variableData<IECore::V3fVectorData>( "P", IECore::PrimitiveVariable::Vertex ) );
	const std::vector<Imath::V3f> &vertices( pointData->readable() );

	// Find the vertex that is closest to the snap point.
	Imath::V3d closestVertex;
	float closestDistance = std::numeric_limits<float>::max();

	for( std::vector<Imath::V3f>::const_iterator it( vertices.begin() ); it != vertices.end(); ++it )
	{
		Imath::V3d vert( *it );
		float d( ( pt - vert ).length() ); // The distance between the vertex and the snap point.
		if( d < closestDistance )
		{
			closestDistance = d;
			closestVertex = vert;
		}
	}

	// Snap to the vertex (converted back to world space).
	closestVertex *= objToWorld;
	snapInfo.setSnapPoint( MPoint( closestVertex[0], closestVertex[1], closestVertex[2] ) );

	return true;
}
// Description:
//	Implements the MEL renderViewRenderRegion command. This command
//	fills the currently selected Render Region with a circular blue
//	and white pattern. It assumes that the Render View is currently
//	displaying a 640x480 image (if it isn't, then this command will
//	resize the Render View to 640x480).
//
// Arguments:
//	args - the argument list that was passed to the command from MEL.
//		   -background/-b renders the pattern without clearing the region
//
// Return Value:
//	MS::kSuccess - command succeeded
//	MS::kFailure - command failed (returning this value will cause the
//				   MEL script that is being run to terminate unless the
//				   error is caught using a "catch" statement.
//
// Fixes: the pixel buffer allocated with new[] was never released
// (leaked on every invocation, including the early-return error paths);
// failures after startRegionRender() succeeded returned without calling
// endRender(), leaving the Render View stuck in render mode.
MStatus renderViewRenderRegion::doIt( const MArgList& args )
{
	MStatus stat = MS::kSuccess;
	const unsigned int resX = 640;
	const unsigned int resY = 480;

	// Check if the render view exists. It should always exist, unless
	// Maya is running in batch mode.
	if (!MRenderView::doesRenderEditorExist())
	{
		setResult( "Cannot renderViewRenderRegion in batch render mode. "
				   "Please run in interactive mode, "
				   "so that the render editor exists." );
		return MS::kFailure;
	}

	// get optional flags
	MArgDatabase argData( syntax(), args );
	parseSyntax( argData );

	// Pick a camera, and tell the Render View that we will be rendering
	// from its point of view. Just use the camera for the active
	// modelling view.
	M3dView curView = M3dView::active3dView();
	MDagPath camDagPath;
	curView.getCamera( camDagPath );
	cout<<"Region rendering camera"<<camDagPath.fullPathName().asChar()<<endl;

	if( MRenderView::setCurrentCamera( camDagPath ) != MS::kSuccess )
	{
		setResult( "renderViewRenderRegion: error occurred in setCurrentCamera." );
		return MS::kFailure;
	}

	// Retrieve the dimensions of the currently selected Render Region.
	unsigned int regionLeft, regionRight, regionBottom, regionTop;
	stat = MRenderView::getRenderRegion( regionLeft, regionRight,
										 regionBottom, regionTop );
	if( stat != MS::kSuccess )
	{
		setResult( "renderViewRenderRegion: error occurred in getRenderRegion." );
		return MS::kFailure;
	}

	// Assume that the full rendered image is 640x480, and tell the
	// Render View that we're about to start rendering the given region.
	stat = MRenderView::startRegionRender( resX, resY,
										   regionLeft, regionRight,
										   regionBottom, regionTop,
										   doNotClearBackground );
	if( stat != MS::kSuccess )
	{
		setResult( "renderViewRenderRegion: error occurred in startRegionRender." );
		return MS::kFailure;
	}

	cout<<"Rendering Region ("<<regionLeft<<","<<regionBottom
		<<") -> ("<<regionRight<<","<<regionTop<<")"<<endl;

	unsigned int width = regionRight - regionLeft + 1;
	unsigned int height = regionTop - regionBottom + 1;
	unsigned int numPixels = width * height;
	unsigned int middleX = width / 2;
	unsigned int middleY = height / 2;

	// Buffer holding the pixel data for the whole region; released on
	// every exit path below.
	RV_PIXEL* pixels = new RV_PIXEL[numPixels];

	// Fill the region buffer with a circular blue/white pattern centred on
	// the middle of the region.
	for( unsigned int x = 0; x < width; x++ )
	{
		for( unsigned int y = 0; y < height; y++ )
		{
			unsigned int index = y*width + x;
			int xCoord = (int)x - (int)middleX;
			int yCoord = (int)y - (int)middleY;
			pixels[index] = evaluate( xCoord, yCoord );
		}
	}

	// Send the pixel data to the Render View.
	stat = MRenderView::updatePixels( regionLeft, regionRight,
									  regionBottom, regionTop, pixels );
	delete [] pixels; // buffer no longer needed whatever the outcome

	if( stat != MS::kSuccess )
	{
		setResult( "renderViewRenderRegion: error occurred in updatePixels." );
		MRenderView::endRender(); // leave the Render View in a sane state
		return MS::kFailure;
	}

	// Force a refresh of the region in the Render View window.
	stat = MRenderView::refresh( regionLeft, regionRight, regionBottom, regionTop );
	if( stat != MS::kSuccess )
	{
		setResult( "renderViewRenderRegion: error occurred in refresh." );
		MRenderView::endRender();
		return MS::kFailure;
	}

	// Notify the Render View that we are done rendering the region.
	stat = MRenderView::endRender();
	if( stat != MS::kSuccess )
	{
		setResult( "renderViewRenderRegion: error occurred in endRender." );
		return MS::kFailure;
	}

	setResult( "renderViewRenderRegion completed." );
	return stat;
}
// Mirror Maya's "persp" camera into the D3D viewer: builds a left-handed
// look-at view matrix and a perspective projection from the Maya camera.
HRESULT CMayaManager::PerspectiveCamera_Synchronize()
{
	MDagPath MayaCamera;
	M3dView panel;

	// Walk every 3d view looking for the panel whose camera transform
	// starts with "persp".
	for(UINT iView= 0; iView < M3dView::numberOf3dViews(); iView++)
	{
		D3DXMATRIX mCamera;
		M3dView::get3dView(iView, panel);
		panel.getCamera(MayaCamera);
		MayaCamera.pop(); // camera shape -> its transform

		// Compare only the leading "persp" characters of the camera name.
		MString perspNameStr( "persp" );
		MString cameraNameStr = MayaCamera.partialPathName();
		cameraNameStr = cameraNameStr.substring(0, perspNameStr.length()-1 );
		const char* cameraName= cameraNameStr.asChar();

		if(cameraNameStr == perspNameStr )
		{
			MayaCamera.extendToShape();
			MFloatMatrix fView(MayaCamera.inclusiveMatrix().matrix );
			ConvertWorldMatrix(mCamera, fView);

			panel.getCamera(MayaCamera);
			MFnCamera fnMayaCamera(MayaCamera.node());

			// Camera basis vectors and eye point in Maya's space.
			MVector mUp= fnMayaCamera.upDirection();
			MVector mAt= fnMayaCamera.viewDirection();
			MPoint mEye= fnMayaCamera.eyePoint(MSpace::kWorld);

			// Negate Z to go from Maya's right-handed coordinates to
			// D3D's left-handed coordinates.
			D3DXVECTOR3 dxEye( (float)mEye.x, (float)mEye.y, (float)-mEye.z );
			D3DXVECTOR3 dxAt( (float)mAt.x, (float)mAt.y, (float)-mAt.z );
			D3DXVECTOR3 dxUp( (float)mUp.x, (float)mUp.y, (float)-mUp.z );

			D3DXVECTOR4 fEye;
			D3DXVECTOR4 fAt;
			D3DXVECTOR3 fUp;

			// Transform eye/at by the converted camera matrix; the up
			// vector is a direction, so use the normal transform.
			D3DXVec3Transform(&fEye, &dxEye,(D3DXMATRIX*)&mCamera);
			D3DXVec3Transform(&fAt, &dxAt,(D3DXMATRIX*)&mCamera);
			D3DXVec3TransformNormal(&fUp, &dxUp,(D3DXMATRIX*)&mCamera);

			D3DXMatrixLookAtLH(&PerspectiveCamera_View,
				(D3DXVECTOR3*)&fEye,
				(D3DXVECTOR3*)&fAt,
				&fUp);

			// Projection matrix from the clip planes and the horizontal
			// field of view (f is the focal-length-style scale factor).
			float zNear = (float)fnMayaCamera.nearClippingPlane();
			float zFar = (float)fnMayaCamera.farClippingPlane();
			float hFOV = (float)fnMayaCamera.horizontalFieldOfView();
			float f = (float) (1.0f / (float) tan( hFOV / 2.0f ));

			ZeroMemory( &PerspectiveCamera_Projection, sizeof(PerspectiveCamera_Projection) );
			PerspectiveCamera_Projection._11 = f;
			PerspectiveCamera_Projection._22 = f;
			PerspectiveCamera_Projection._33 = (zFar+zNear) / (zFar-zNear);
			PerspectiveCamera_Projection._34 = 1.0f;
			PerspectiveCamera_Projection._43 = -2 * (zFar*zNear)/(zFar-zNear);

			break;
		}
	}

	return S_OK;
}
// Perform object or component selection for the scene shape. Hits are
// gathered with an IECoreGL::Selector (instead of GL_SELECT mode, for
// performance); in highlight mode the hits become polygon components,
// otherwise the whole shape is selected. Returns true when anything was
// added to the selection.
bool SceneShapeUI::select( MSelectInfo &selectInfo, MSelectionList &selectionList, MPointArray &worldSpaceSelectPts ) const
{
	MStatus s;

	// early out if we're not selectable. we always allow components to be selected if we're highlighted,
	// but we don't allow ourselves to be selected as a whole unless meshes are in the selection mask.
	// it's not ideal that we act like a mesh, but it's at least consistent with the drawing mask we use.
	if( selectInfo.displayStatus() != M3dView::kHilite )
	{
		MSelectionMask meshMask( MSelectionMask::kSelectMeshes );
		// Apparently selectInfo.selectable() still returns true when meshes are not
		// displayed by the M3dView, so we are also testing the objectDisplay status.
		// This was last confirmed in Maya 2014, and is presumably a Maya bug.
		if( !selectInfo.selectable( meshMask ) || !selectInfo.objectDisplayStatus( M3dView::kDisplayMeshes ) )
		{
			return false;
		}
	}

	// early out if we have no scene to draw
	SceneShape *sceneShape = static_cast<SceneShape *>( surfaceShape() );
	if( !sceneShape->getSceneInterface() )
	{
		return false;
	}

	IECoreGL::ConstScenePtr scene = sceneShape->glScene();
	if( !scene )
	{
		return false;
	}

	// we want to perform the selection using an IECoreGL::Selector, so we
	// can avoid the performance penalty associated with using GL_SELECT mode.
	// that means we don't really want to call view.beginSelect(), but we have to
	// call it just to get the projection matrix for our own selection, because as far
	// as I can tell, there is no other way of getting it reliably.
	M3dView view = selectInfo.view();
	view.beginSelect();
	Imath::M44d projectionMatrix;
	glGetDoublev( GL_PROJECTION_MATRIX, projectionMatrix.getValue() );
	view.endSelect();

	view.beginGL();

	glMatrixMode( GL_PROJECTION );
	glLoadMatrixd( projectionMatrix.getValue() );

	// Occlusion queries are cheaper when depth-sorted hits are not
	// required, which is the case for multi-select in component mode.
	IECoreGL::Selector::Mode selectionMode = IECoreGL::Selector::IDRender;
	if( selectInfo.displayStatus() == M3dView::kHilite && !selectInfo.singleSelection() )
	{
		selectionMode = IECoreGL::Selector::OcclusionQuery;
	}

	std::vector<IECoreGL::HitRecord> hits;
	{
		IECoreGL::Selector selector( Imath::Box2f( Imath::V2f( 0 ), Imath::V2f( 1 ) ), selectionMode, hits );

		IECoreGL::State::bindBaseState();
		selector.baseState()->bind();
		scene->render( selector.baseState() );

		if( selectInfo.displayStatus() != M3dView::kHilite )
		{
			// We're not in component selection mode. We'd like to be able to select the scene shape
			// using the bounding box so we draw it too but only if it is visible
			MPlug pDrawBound( sceneShape->thisMObject(), SceneShape::aDrawRootBound );
			bool drawBound;
			pDrawBound.getValue( drawBound );
			if( drawBound )
			{
				IECoreGL::BoxPrimitive::renderWireframe( IECore::convert<Imath::Box3f>( sceneShape->boundingBox() ) );
			}
		}
	}

	view.endGL();

	if( hits.empty() )
	{
		return false;
	}

	// iterate over the hits, converting them into components and also finding
	// the closest one.
	MIntArray componentIndices;

	float depthMin = std::numeric_limits<float>::max();
	int depthMinIndex = -1;
	for( unsigned int i=0, e = hits.size(); i < e; i++ )
	{
		if( hits[i].depthMin < depthMin )
		{
			depthMin = hits[i].depthMin;
			depthMinIndex = componentIndices.length();
		}
		int index = sceneShape->selectionIndex( IECoreGL::NameStateComponent::nameFromGLName( hits[i].name ) );
		componentIndices.append( index );
	}

	assert( depthMinIndex >= 0 );

	// figure out the world space location of the closest hit
	MDagPath camera;
	view.getCamera( camera );
	MPoint worldIntersectionPoint;
	selectionRayToWorldSpacePoint( camera, selectInfo, depthMin, worldIntersectionPoint );

	// turn the processed hits into appropriate changes to the current selection
	if( selectInfo.displayStatus() == M3dView::kHilite )
	{
		// selecting components
		MFnSingleIndexedComponent fnComponent;
		MObject component = fnComponent.create( MFn::kMeshPolygonComponent, &s );
		assert( s );

		if( selectInfo.singleSelection() )
		{
			fnComponent.addElement( componentIndices[depthMinIndex] );
		}
		else
		{
			fnComponent.addElements( componentIndices );
		}

		MSelectionList items;
		items.add( selectInfo.multiPath(), component );
		MDagPath path = selectInfo.multiPath();

		selectInfo.addSelection(
			items, worldIntersectionPoint,
			selectionList, worldSpaceSelectPts,
			MSelectionMask::kSelectMeshFaces,
			true
		);
	}
	else
	{
		// Check if we should be able to select that object
		MPlug pObjectOnly( sceneShape->thisMObject(), SceneShape::aObjectOnly );
		bool objectOnly;
		pObjectOnly.getValue( objectOnly );
		if( objectOnly && !sceneShape->getSceneInterface()->hasObject() )
		{
			return true;
		}

		// selecting objects
		MSelectionList item;
		item.add( selectInfo.selectPath() );

		selectInfo.addSelection(
			item, worldIntersectionPoint,
			selectionList, worldSpaceSelectPts,
			MSelectionMask::kSelectMeshes,
			false
		);
	}

	return true;
}
// Drag-edit a NURBS curve with a screen-space brush: CVs whose
// depth-relative distance from the previous cursor ray is inside the
// brush radius are displaced by the cursor's world-space movement, with
// the edit faded out towards the brush edge; the original segment
// lengths (dArrLength) are then re-propagated down the rest of the
// curve so arc length is preserved.
//
// dagPathCurve          : the curve being edited (CVs are written back).
// beforeX/Y, currentX/Y : previous and current cursor port coordinates.
// radius                : brush radius; a negative radius is a no-op.
// dArrLength            : per-segment original lengths.
// points                : out - the edited CVs, in curve-local space.
//
// Fixes: loop counters changed from int to unsigned int to match
// MPointArray::length() (signed/unsigned comparison); removed the
// unused locals vDirLength and vBefore.
MStatus sgCurveEditBrush_context::editCurve( MDagPath dagPathCurve,
	int beforeX, int beforeY, int currentX, int currentY, float radius,
	const MDoubleArray& dArrLength, MPointArray &points )
{
	MStatus status;

	if( radius < 0 ) return MS::kSuccess;

	// Camera position and up vector in world space.
	MDagPath dagPathCam;
	M3dView view = M3dView::active3dView( &status );
	CHECK_MSTATUS_AND_RETURN_IT( status );
	view.getCamera( dagPathCam );
	MPoint camPos = dagPathCam.inclusiveMatrix()[3];
	MVector vCamUp = dagPathCam.inclusiveMatrix()[1];
	vCamUp.normalize();

	radius *= .05;

	// Project both cursor positions onto a plane 20 units in front of the
	// camera; their difference is the world-space brush movement.
	MPoint nearClipBefore;
	MPoint farClipBefore;
	view.viewToWorld( beforeX, beforeY, nearClipBefore, farClipBefore );
	MVector rayBefore = nearClipBefore - camPos;
	rayBefore.normalize();
	rayBefore *= 20;
	MPoint posBefore = rayBefore + camPos;

	MPoint nearClipCurrent;
	MPoint farClipCurrent;
	view.viewToWorld( currentX, currentY, nearClipCurrent, farClipCurrent );
	MVector rayCurrent = nearClipCurrent - camPos;
	rayCurrent.normalize();
	rayCurrent *= 20;
	MPoint posCurrent = rayCurrent + camPos;

	MVector vMove = posCurrent - posBefore;

	// Work on the CVs in world space.
	MMatrix mtxCurve = dagPathCurve.inclusiveMatrix();
	MFnNurbsCurve fnCurve( dagPathCurve );
	fnCurve.getCVs( points );
	for( unsigned int i=0; i< points.length(); i++ )
	{
		points[i] *= mtxCurve;
	}

	for( unsigned int i=1; i< points.length(); i++ )
	{
		MPoint cuPoint = points[i];

		// Distance of the CV from the brush ray, scaled by its depth,
		// gives a screen-space-like radius test.
		// NOTE(review): projV.length() can be zero for a CV at the camera
		// position - confirm this cannot occur in practice.
		MVector vPoint = cuPoint - camPos;
		MVector projV = ( vPoint * rayBefore )/( pow( rayBefore.length(), 2 ) )* rayBefore;
		MVector vertical = vPoint - projV;
		float radiusForPoint = vertical.length() / projV.length();

		if( radius < radiusForPoint ) continue;

		MPoint parentPoint = points[i-1];

		// Scale the cursor movement by depth so the edit follows the cursor.
		MVector vCurveDirection = cuPoint - parentPoint;
		MVector vEditDirection = vCurveDirection + vMove/rayBefore.length()*projV.length();

		// Damp edits that would fold the curve back on itself.
		double dotEdit = vCurveDirection.normal() * vEditDirection.normal();
		if( dotEdit < 0 ) continue;
		vEditDirection = vEditDirection * dotEdit + vCurveDirection*( 1-dotEdit );

		// Keep the segment length; fade the edit towards the brush edge.
		MVector vEditLength = vEditDirection / vEditDirection.length() * vCurveDirection.length();
		MVector vEdit = (vEditLength - vCurveDirection) * pow((double)(1-radiusForPoint/radius), 1 );
		points[i] += vEdit;

		// Re-fit the downstream CVs so each segment keeps its original length.
		for( unsigned int j=i+1; j< points.length(); j++ )
		{
			MPoint beforePoint = points[j];
			MPoint pPoint = points[j-1];
			MVector vAfter = points[j] - pPoint;
			MVector vCurrent = vAfter.normal() * dArrLength[j];
			points[j] = vCurrent + pPoint;
			vEdit = points[j] - beforePoint;
		}
	}

	// Back to curve-local space, then write the CVs.
	MMatrix invMtxCurve = mtxCurve.inverse();
	for( unsigned int i=0; i< points.length(); i++ )
	{
		points[i] *= invMtxCurve;
	}
	fnCurve.setCVs( points );
	fnCurve.updateCurve();

	return MS::kSuccess;
}
// Per-frame user render operation: optionally dumps pass / lighting /
// draw-context debug information, then draws the override label and the
// scene bounding boxes through the panel's M3dView.
//
// Fixes: overridePtr was dereferenced without a NULL check when drawing
// the label (findRenderOverride() can return NULL); "heigh" typo in the
// viewport-dimension debug printf.
MStatus viewRenderUserOperation::execute( const MHWRender::MDrawContext & drawContext )
{
	// Sample code to debug pass information
	static const bool debugPassInformation = false;
	if (debugPassInformation)
	{
		const MHWRender::MPassContext & passCtx = drawContext.getPassContext();
		const MString & passId = passCtx.passIdentifier();
		const MStringArray & passSem = passCtx.passSemantics();
		printf("viewRenderUserOperation: drawing in pass[%s], semantic[", passId.asChar());
		for (unsigned int i=0; i<passSem.length(); i++)
			printf(" %s", passSem[i].asChar());
		printf("\n");
	}

	// Example code to find the active override.
	// This is not necessary if the operations just keep a reference
	// to the override, but this demonstrates how this
	// contextual information can be extracted.
	MHWRender::MRenderer *theRenderer = MHWRender::MRenderer::theRenderer();
	const MHWRender::MRenderOverride *overridePtr = NULL;
	if (theRenderer)
	{
		const MString & overrideName = theRenderer->activeRenderOverride();
		overridePtr = theRenderer->findRenderOverride( overrideName );
	}

	// Some sample code to debug lighting information in the MDrawContext
	if (fDebugLightingInfo)
	{
		viewRenderOverrideUtilities::printDrawContextLightInfo( drawContext );
	}

	// Some sample code to debug other MDrawContext information
	if (fDebugDrawContext)
	{
		MStatus status;
		MMatrix matrix = drawContext.getMatrix(MHWRender::MFrameContext::kWorldMtx, &status);
		double dest[4][4];
		status = matrix.get(dest);
		printf("World matrix is:\n");
		printf("\t%f, %f, %f, %f\n", dest[0][0], dest[0][1], dest[0][2], dest[0][3]);
		printf("\t%f, %f, %f, %f\n", dest[1][0], dest[1][1], dest[1][2], dest[1][3]);
		printf("\t%f, %f, %f, %f\n", dest[2][0], dest[2][1], dest[2][2], dest[2][3]);
		printf("\t%f, %f, %f, %f\n", dest[3][0], dest[3][1], dest[3][2], dest[3][3]);

		MDoubleArray viewDirection = drawContext.getTuple(MHWRender::MFrameContext::kViewDirection, &status);
		printf("Viewdirection is: %f, %f, %f\n",
			viewDirection[0], viewDirection[1], viewDirection[2]);

		MBoundingBox box = drawContext.getSceneBox(&status);
		printf("Screen box is:\n");
		printf("\twidth=%f, height=%f, depth=%f\n", box.width(), box.height(), box.depth());
		float center[4];
		box.center().get(center);
		printf("\tcenter=(%f, %f, %f, %f)\n", center[0], center[1], center[2], center[3]);

		int originX, originY, width, height;
		status = drawContext.getViewportDimensions(originX, originY, width, height);
		printf("Viewport dimension: center(%d, %d), width=%d, height=%d\n",
			originX, originY, width, height);
	}

	// Draw some additional things for scene draw
	M3dView mView;
	if (mPanelName.length() &&
		(M3dView::getM3dViewFromModelPanel(mPanelName, mView) == MStatus::kSuccess))
	{
		// Get the current viewport and scale it relative to that
		int targetW, targetH;
		drawContext.getRenderTargetSize( targetW, targetH );

		if (fDrawLabel)
		{
			// Guard against a missing override - the original code
			// dereferenced overridePtr unconditionally and crashed when
			// no render override was active.
			MString testString("Drawing with override: ");
			testString += overridePtr ? overridePtr->name() : MString("(none)");
			MPoint pos(0.0,0.0,0.0);
			glColor3f( 1.0f, 1.0f, 1.0f );
			mView.drawText( testString, pos);
		}

		// Some user drawing of scene bounding boxes
		if (fDrawBoundingBoxes)
		{
			MDagPath cameraPath;
			mView.getCamera( cameraPath);
			MCustomSceneDraw userDraw;
			userDraw.draw( cameraPath, targetW, targetH );
		}
	}
	return MStatus::kSuccess;
}
bool DrawableHolderUI::select( MSelectInfo &selectInfo, MSelectionList &selectionList, MPointArray &worldSpaceSelectPts ) const { MStatus s; // early out if we're not selectable. we always allow components to be selected if we're highlighted, // but we don't allow ourselves to be selected as a whole unless meshes are in the selection mask. // it's not ideal that we act like a mesh, but it's at least consistent with the drawing mask we use. if( selectInfo.displayStatus() != M3dView::kHilite ) { MSelectionMask meshMask( MSelectionMask::kSelectMeshes ); if( !selectInfo.selectable( meshMask ) ) { return false; } } // early out if we have no scene to draw DrawableHolder *drawableHolder = static_cast<DrawableHolder *>( surfaceShape() ); IECoreGL::ConstScenePtr scene = drawableHolder->scene(); if( !scene ) { return false; } // we want to perform the selection using an IECoreGL::Selector, so we // can avoid the performance penalty associated with using GL_SELECT mode. // that means we don't really want to call view.beginSelect(), but we have to // call it just to get the projection matrix for our own selection, because as far // as i can tell, there is no other way of getting it reliably. 
M3dView view = selectInfo.view(); view.beginSelect(); Imath::M44d projectionMatrix; glGetDoublev( GL_PROJECTION_MATRIX, projectionMatrix.getValue() ); view.endSelect(); view.beginGL(); glMatrixMode( GL_PROJECTION ); glLoadMatrixd( projectionMatrix.getValue() ); IECoreGL::Selector::Mode selectionMode = IECoreGL::Selector::IDRender; if( selectInfo.displayStatus() == M3dView::kHilite && !selectInfo.singleSelection() ) { selectionMode = IECoreGL::Selector::OcclusionQuery; } std::vector<IECoreGL::HitRecord> hits; { IECoreGL::Selector selector( Imath::Box2f( Imath::V2f( 0 ), Imath::V2f( 1 ) ), selectionMode, hits ); IECoreGL::State::bindBaseState(); selector.baseState()->bind(); scene->render( selector.baseState() ); } view.endGL(); if( !hits.size() ) { return false; } // find the depth of the closest hit: MIntArray componentIndices; float depthMin = std::numeric_limits<float>::max(); for( int i=0, e = hits.size(); i < e; i++ ) { if( hits[i].depthMin < depthMin ) { depthMin = hits[i].depthMin; } } // figure out the world space location of the closest hit MDagPath camera; view.getCamera( camera ); MFnCamera fnCamera( camera.node() ); float near = fnCamera.nearClippingPlane(); float far = fnCamera.farClippingPlane(); float z = -1; if( fnCamera.isOrtho() ) { z = Imath::lerp( near, far, depthMin ); } else { // perspective camera - depth isn't linear so linearise to get z float a = far / ( far - near ); float b = far * near / ( near - far ); z = b / ( depthMin - a ); } MPoint localRayOrigin; MVector localRayDirection; selectInfo.getLocalRay( localRayOrigin, localRayDirection ); MMatrix localToCamera = selectInfo.selectPath().inclusiveMatrix() * camera.inclusiveMatrix().inverse(); MPoint cameraRayOrigin = localRayOrigin * localToCamera; MVector cameraRayDirection = localRayDirection * localToCamera; MPoint cameraIntersectionPoint = cameraRayOrigin + cameraRayDirection * ( -( z - near ) / cameraRayDirection.z ); MPoint worldIntersectionPoint = cameraIntersectionPoint * 
camera.inclusiveMatrix(); MSelectionList item; item.add( selectInfo.selectPath() ); selectInfo.addSelection( item, worldIntersectionPoint, selectionList, worldSpaceSelectPts, MSelectionMask::kSelectMeshes, false ); return true; }
// Renders the scene into Maya's Render View.
//
// Resolves the camera (explicit _camera name, or the active 3d view's camera),
// reads integrator/intersector/sampler settings off the raytrace globals node,
// then drives the Raytrace output, polling until rendering completes and
// pushing the pixel buffer to MRenderView as it progresses.
//
// @return MS::kSuccess (CHECKERR may return early on Maya API failures)
MStatus MayaRenderView::redoIt()
{
	MStatus retStatus;

	// get the camera
	MDagPath camera;
	if(_camera != std::string("")){
		MObject node = getNode(MString(_camera.c_str()),&retStatus);
		// NOTE(review): relies on a member-style getAPathTo(node); modern Maya
		// exposes the static MDagPath::getAPathTo(node, camera) — confirm
		// against the Maya version this is built for.
		camera.getAPathTo(node);
	}
	else {
		M3dView view = M3dView::active3dView(&retStatus);
		CHECKERR(retStatus,"M3dView::active3dView");
		retStatus = view.getCamera(camera);
		// Error message fixed: this checks getCamera, not active3dView.
		CHECKERR(retStatus,"M3dView::getCamera");
	}

	// read render settings from the raytrace globals node
	MString integrator;
	MFnDependencyNode globals(raytraceGlobalsNode::get());
	getCustomAttribute(integrator, "integrator", globals);
	cout << "Integrator: " << integrator.asChar() << endl;
	MString intersector;
	getCustomAttribute(intersector, "intersector", globals);
	cout << "Intersector: " << intersector.asChar() << endl;
	MString sampler;
	getCustomAttribute(sampler, "sampler", globals);
	cout << "Sampler: " << sampler.asChar() << endl;

	// set the renderview camera
	retStatus = MRenderView::setCurrentCamera (camera );
	CHECKERR(retStatus,"MRenderView::setCurrentCamera (camera )");
	cout << "Camera: " << camera.fullPathName().asChar() << endl;

	int xSize = _resolution.x(),ySize = _resolution.y();

	// build the raytracer scene reader and output
	MayaSceneReader* mayaReader(new MayaSceneReader());
	boost::shared_ptr<Raytrace::ISceneReader> sceneReader(mayaReader);
	mayaReader->SetResolution(_resolution);
	mayaReader->SetCamera(camera);
	Raytrace::Output output = Raytrace::CreateCustomOutput(sceneReader,"mayaOutput");

	RV_PIXEL* pixels = new RV_PIXEL[xSize*ySize];
	MRenderView::startRender ( xSize, ySize, true, true );
	output->SetIntegrator(integrator.asChar());
	output->SetIntersector(intersector.asChar());
	output->SetSampler(sampler.asChar());
	output->SetOutputSurface(pixels, sizeof(RV_PIXEL)*xSize*ySize, xSize, ySize, Raytrace::IOutput::FORMAT_RGBA_F32);
	output->Refresh();

	std::cout << "Begin Rendering" << std::endl;
	Raytrace::String status;
	// poll the renderer, pushing progressive updates to the Render View
	while(true)
	{
		if(output->GetLastFrameInfo(status) == Raytrace::Result::RenderingComplete)
			break;
		std::cout << "Update Output : " << status << std::endl;
		output->UpdateOutput();
		MRenderView::updatePixels(0,xSize-1,0,ySize-1, pixels,true);
		MRenderView::refresh(0,xSize-1,0,ySize-1);
		Sleep(1000);
	}
	std::cout << "Complete Rendering: " << status << std::endl;

	// push the final frame
	output->UpdateOutput();
	MRenderView::updatePixels(0,xSize-1,0,ySize-1, pixels,true);
	MRenderView::endRender();

	// FIX: the pixel buffer was leaked on every render.
	delete[] pixels;

	return MS::kSuccess;
}
void ProxyViz::draw( M3dView & view, const MDagPath & path, M3dView::DisplayStyle style, M3dView::DisplayStatus status ) { if(!m_enableCompute) return; MObject thisNode = thisMObject(); updateWorldSpace(thisNode); MPlug mutxplug( thisNode, axmultiplier); MPlug mutyplug( thisNode, aymultiplier); MPlug mutzplug( thisNode, azmultiplier); setScaleMuliplier(mutxplug.asFloat(), mutyplug.asFloat(), mutzplug.asFloat() ); MPlug svtPlug(thisNode, adisplayVox); setShowVoxLodThresold(svtPlug.asFloat() ); MDagPath cameraPath; view.getCamera(cameraPath); if(hasView() ) updateViewFrustum(thisNode); else updateViewFrustum(cameraPath); setViewportAspect(view.portWidth(), view.portHeight() ); MPlug actp(thisNode, aactivated); if(actp.asBool()) setWireColor(.125f, .1925f, .1725f); else setWireColor(.0675f, .0675f, .0675f); _viewport = view; fHasView = 1; view.beginGL(); double mm[16]; matrix_as_array(_worldInverseSpace, mm); glPushMatrix(); glMultMatrixd(mm); ExampVox * defBox = plantExample(0); updateGeomBox(defBox, thisNode); drawWireBox(defBox->geomCenterV(), defBox->geomScale() ); Matrix44F mat; mat.setFrontOrientation(Vector3F::YAxis); mat.scaleBy(defBox->geomSize() ); mat.glMatrix(m_transBuf); drawCircle(m_transBuf); drawGridBounding(); // drawGrid(); if ( style == M3dView::kFlatShaded || style == M3dView::kGouraudShaded ) { drawPlants(); } else drawWiredPlants(); if(hasView() ) drawViewFrustum(); drawBrush(view); drawActivePlants(); drawGround(); glPopMatrix(); view.endGL(); std::cout<<" viz node draw end"; }
// Performs viewport selection for the ProceduralHolder shape, supporting both
// whole-object selection and per-component (kHilite) selection.
//
// Renders the held IECoreGL scene with an IECoreGL::Selector, maps hit names
// to component indices, linearises the closest hit's depth into a camera-space
// z, and registers either the closest component / all hit components (kHilite)
// or the object itself with Maya's selection.
bool ProceduralHolderUI::select( MSelectInfo &selectInfo, MSelectionList &selectionList, MPointArray &worldSpaceSelectPts ) const
{
	MStatus s;

	// early out if we're not selectable. we always allow components to be selected if we're highlighted,
	// but we don't allow ourselves to be selected as a whole unless meshes are in the selection mask.
	// it's not ideal that we act like a mesh, but it's at least consistent with the drawing mask we use.
	if( selectInfo.displayStatus() != M3dView::kHilite )
	{
		MSelectionMask meshMask( MSelectionMask::kSelectMeshes );
		if( !selectInfo.selectable( meshMask ) )
		{
			return false;
		}
	}

	// early out if we have no scene to draw
	ProceduralHolder *proceduralHolder = static_cast<ProceduralHolder *>( surfaceShape() );
	IECoreGL::ConstScenePtr scene = proceduralHolder->scene();
	if( !scene )
	{
		return false;
	}

	// we want to perform the selection using an IECoreGL::Selector, so we
	// can avoid the performance penalty associated with using GL_SELECT mode.
	// that means we don't really want to call view.beginSelect(), but we have to
	// call it just to get the projection matrix for our own selection, because as far
	// as i can tell, there is no other way of getting it reliably.
	M3dView view = selectInfo.view();
	view.beginSelect();
	Imath::M44d projectionMatrix;
	glGetDoublev( GL_PROJECTION_MATRIX, projectionMatrix.getValue() );
	view.endSelect();
	view.beginGL();
	glMatrixMode( GL_PROJECTION );
	glLoadMatrixd( projectionMatrix.getValue() );

	// occlusion queries suffice for multi-select component highlighting;
	// otherwise we need hit IDs and depths
	IECoreGL::Selector::Mode selectionMode = IECoreGL::Selector::IDRender;
	if( selectInfo.displayStatus() == M3dView::kHilite && !selectInfo.singleSelection() )
	{
		selectionMode = IECoreGL::Selector::OcclusionQuery;
	}

	std::vector<IECoreGL::HitRecord> hits;
	{
		// scoped so the selector flushes its hits on destruction
		IECoreGL::Selector selector( Imath::Box2f( Imath::V2f( 0 ), Imath::V2f( 1 ) ), selectionMode, hits );

		IECoreGL::State::bindBaseState();
		selector.baseState()->bind();
		scene->render( selector.baseState() );

		if( selectInfo.displayStatus() != M3dView::kHilite )
		{
			// we're not in component selection mode. we'd like to be able to select the procedural
			// object using the bounding box so we draw it too.
			MPlug pDrawBound( proceduralHolder->thisMObject(), ProceduralHolder::aDrawBound );
			bool drawBound = true;
			pDrawBound.getValue( drawBound );
			if( drawBound )
			{
				IECoreGL::BoxPrimitive::renderWireframe( IECore::convert<Imath::Box3f>( proceduralHolder->boundingBox() ) );
			}
		}
	}
	view.endGL();

	if( !hits.size() )
	{
		return false;
	}

	// iterate over the hits, converting them into components and also finding
	// the closest one.
	MIntArray componentIndices;
	float depthMin = std::numeric_limits<float>::max();
	int depthMinIndex = -1;
	for( int i=0, e = hits.size(); i < e; i++ )
	{
		if( hits[i].depthMin < depthMin )
		{
			depthMin = hits[i].depthMin;
			// index into componentIndices of the closest hit so far
			depthMinIndex = componentIndices.length();
		}
		// hit names were registered in m_componentsMap when the scene was built
		ProceduralHolder::ComponentsMap::const_iterator compIt = proceduralHolder->m_componentsMap.find( hits[i].name.value() );
		assert( compIt != proceduralHolder->m_componentsMap.end() );
		componentIndices.append( compIt->second.first );
	}
	assert( depthMinIndex >= 0 );

	// figure out the world space location of the closest hit
	MDagPath camera;
	view.getCamera( camera );
	MFnCamera fnCamera( camera.node() );
	float near = fnCamera.nearClippingPlane();
	float far = fnCamera.farClippingPlane();
	float z = -1;
	if( fnCamera.isOrtho() )
	{
		// orthographic depth is already linear in [near, far]
		z = Imath::lerp( near, far, depthMin );
	}
	else
	{
		// perspective camera - depth isn't linear so linearise to get z
		float a = far / ( far - near );
		float b = far * near / ( near - far );
		z = b / ( depthMin - a );
	}

	// intersect the selection ray with the plane at camera-space depth z,
	// then transform the hit point back to world space
	MPoint localRayOrigin;
	MVector localRayDirection;
	selectInfo.getLocalRay( localRayOrigin, localRayDirection );
	MMatrix localToCamera = selectInfo.selectPath().inclusiveMatrix() * camera.inclusiveMatrix().inverse();
	MPoint cameraRayOrigin = localRayOrigin * localToCamera;
	MVector cameraRayDirection = localRayDirection * localToCamera;

	MPoint cameraIntersectionPoint = cameraRayOrigin + cameraRayDirection * ( -( z - near ) / cameraRayDirection.z );
	MPoint worldIntersectionPoint = cameraIntersectionPoint * camera.inclusiveMatrix();

	// turn the processed hits into appropriate changes to the current selection
	if( selectInfo.displayStatus() == M3dView::kHilite )
	{
		// selecting components
		MFnSingleIndexedComponent fnComponent;
		MObject component = fnComponent.create( MFn::kMeshPolygonComponent, &s ); assert( s );
		if( selectInfo.singleSelection() )
		{
			// only the closest component
			fnComponent.addElement( componentIndices[depthMinIndex] );
		}
		else
		{
			// everything that was hit
			fnComponent.addElements( componentIndices );
		}
		MSelectionList items;
		items.add( selectInfo.multiPath(), component );
		selectInfo.addSelection( items, worldIntersectionPoint, selectionList, worldSpaceSelectPts, MSelectionMask::kSelectMeshFaces, true );
	}
	else
	{
		// selecting objects
		MSelectionList item;
		item.add( selectInfo.selectPath() );
		selectInfo.addSelection( item, worldIntersectionPoint, selectionList, worldSpaceSelectPts, MSelectionMask::kSelectMeshes, false );
	}
	return true;
}
/* virtual */
// Binds the NV20 decal-bump shading state for drawing.
//
// Reads diffuse/light colors and the light direction from the node's plugs,
// converts the light direction into eye space, resolves the decal and bump
// file-texture names, then configures the four NV texture-shader stages and
// the NV register combiners that modulate the decal color by the computed
// illumination. Falls back to a plain color-material setup if either texture
// name is missing.
//
// @param request the draw request being serviced
// @param view    the view to draw into
// @return MS::kSuccess, or the failing status from a plug read
MStatus hwDecalBumpShader_NV20::bind(const MDrawRequest& request, M3dView& view)
{
	MStatus status;

	// Get the diffuse color
	//
	float diffuse_color[4];
	status = getFloat3(color, diffuse_color);
	diffuse_color[3] = 1.0;
	if (!status) return status;

	// Get the light color
	//
	float light_color[4];
	light_color[3] = 1.0f;
	status = getFloat3(lightColor, light_color);
	if (!status) return status;

	// Get the light direction (for directionalLight)
	//
	status = getFloat3(light, &lightRotation[0]);
	if (!status) return status;

	// Get the bumpScale value
	// NOTE(review): currently unused — intended to be passed to the texture
	// cache when rebuilding the bump/normal map (see stage 1 below).
	float bumpScaleValue = 2.0f;

	// Get the bumpMap type
	//
	bool isHeightFieldMap = true;

	// Direction of the directional light
	//
	// Convert the light direction (which is assumed in originally be in world space, in euler coordinates)
	// into an eye space vector.
	//
	double scale = M_PI/180.0; // Internal rotations are in radian and not in degrees
	MEulerRotation lightRot( lightRotation[0] * scale,
							 lightRotation[1] * scale,
							 lightRotation[2] * scale );
	MVector light_v = MVector(0, 0, -1).rotateBy( lightRot ); // WS light vector
	MDagPath camDag;
	view.getCamera(camDag);
	light_v = light_v * camDag.inclusiveMatrixInverse();
	lightRotation[0] = (float) light_v[0];
	lightRotation[1] = (float) light_v[1];
	lightRotation[2] = (float) light_v[2];

	// Get the camera position
	//
	status = getFloat3(camera, &cameraPos[0]);
	if (!status) return status;

	// Get the decal and bump map file names
	//
	MString decalName = "";
	MString bumpName = "";
	ShadingConnection colorConnection(thisMObject(), request.multiPath().partialPathName(), "color");
	ShadingConnection bumpConnection (thisMObject(), request.multiPath().partialPathName(), "bump");

	// If the color attribute is ultimately connected to a file texture, find its filename.
	// otherwise use the default color texture.
	if (colorConnection.type() == ShadingConnection::TEXTURE &&
		colorConnection.texture().hasFn(MFn::kFileTexture))
	{
		// Get the filename of the texture.
		MFnDependencyNode textureNode(colorConnection.texture());
		MPlug filenamePlug( colorConnection.texture(), textureNode.attribute(MString("fileTextureName")) );
		filenamePlug.getValue(decalName);
	}

	// If the bump attribute is ultimately connected to a file texture, find its filename.
	// otherwise use the default bump texture.
	if (bumpConnection.type() == ShadingConnection::TEXTURE &&
		bumpConnection.texture().hasFn(MFn::kFileTexture))
	{
		// Get the filename of the texture.
		// FIX: this previously wrapped colorConnection.texture() (copy-paste
		// bug), so the fileTextureName attribute was looked up on the wrong node.
		MFnDependencyNode textureNode(bumpConnection.texture());
		MPlug filenamePlug( bumpConnection.texture(), textureNode.attribute(MString("fileTextureName")) );
		filenamePlug.getValue(bumpName);
	}

	// Fail safe quit: without both textures fall back to a plain colored
	// material. The attrib pushes here are balanced by the matching pops in
	// the unbind path.
	if (bumpName.length() == 0 || decalName.length() == 0)
	{
		view.beginGL();
		glPushAttrib( GL_ALL_ATTRIB_BITS ); // This might be too conservative
		glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT);
		glColorMaterial(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE);
		glEnable(GL_COLOR_MATERIAL);
		glColor4fv(diffuse_color);
		view.endGL();
		return MS::kSuccess;
	}

	view.beginGL();
	glPushAttrib( GL_ALL_ATTRIB_BITS );
	glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT);

	glEnable(GL_TEXTURE_SHADER_NV);

	// stage 0 -- decal map
	glActiveTextureARB( GL_TEXTURE0_ARB );
	if(m_pTextureCache) m_pTextureCache->bind(colorConnection.texture(), MTexture::RGBA, false);
	glTexEnvi(GL_TEXTURE_SHADER_NV, GL_SHADER_OPERATION_NV, GL_TEXTURE_2D);

	// stage 1 -- bumpped normal map
	glActiveTextureARB( GL_TEXTURE1_ARB );
	// We need to be able to pass the bumpScaleValue
	// to the texture cache and rebuild the bump or normal map
	if( isHeightFieldMap )
	{
		// convert the HeightField to the NormalMap
		if(m_pTextureCache) m_pTextureCache->bind(bumpConnection.texture(), MTexture::NMAP, false);
	}
	else
	{
		if(m_pTextureCache) m_pTextureCache->bind(bumpConnection.texture(), MTexture::RGBA, false);
	}
	glTexEnvi(GL_TEXTURE_SHADER_NV, GL_SHADER_OPERATION_NV, GL_TEXTURE_2D);

	// stage 2 -- dot product (diffuse component)
	glActiveTextureARB( GL_TEXTURE2_ARB );
	glTexEnvi(GL_TEXTURE_SHADER_NV, GL_SHADER_OPERATION_NV, GL_DOT_PRODUCT_NV);
	glTexEnvi(GL_TEXTURE_SHADER_NV, GL_RGBA_UNSIGNED_DOT_PRODUCT_MAPPING_NV, GL_EXPAND_NORMAL_NV);
	glTexEnvi(GL_TEXTURE_SHADER_NV, GL_PREVIOUS_TEXTURE_INPUT_NV, GL_TEXTURE1_ARB);

	// stage 3 -- dot product (specular component)
	glActiveTextureARB( GL_TEXTURE3_ARB );
	bind_lookup_table(); // 2D texture to get the diffuse and specular illumination
	glTexEnvi(GL_TEXTURE_SHADER_NV, GL_SHADER_OPERATION_NV, GL_DOT_PRODUCT_TEXTURE_2D_NV);
	glTexEnvi(GL_TEXTURE_SHADER_NV, GL_RGBA_UNSIGNED_DOT_PRODUCT_MAPPING_NV, GL_EXPAND_NORMAL_NV);
	glTexEnvi(GL_TEXTURE_SHADER_NV, GL_PREVIOUS_TEXTURE_INPUT_NV, GL_TEXTURE1_ARB);

	// With light color and intensity
	//
	glCombinerParameterfvNV(GL_CONSTANT_COLOR0_NV, diffuse_color);
	glCombinerParameterfvNV(GL_CONSTANT_COLOR1_NV, light_color);

	// The register combiner will do the multiplication between
	// the illumination and the decal color
	//
	glEnable(GL_REGISTER_COMBINERS_NV);
#ifndef DEBUGGING_VERTEX_PROGRAM
	glCombinerParameteriNV(GL_NUM_GENERAL_COMBINERS_NV, 2);
#else
	// For testing, only use one general register combiner.
	glCombinerParameteriNV(GL_NUM_GENERAL_COMBINERS_NV, 1);
#endif

	float constColor0[4];
	constColor0[0] = constColor0[1] = constColor0[2] = constColor0[3] = 1.0;
	glCombinerParameterfvNV(GL_CONSTANT_COLOR0_NV, constColor0);

#ifndef DEBUGGING_VERTEX_PROGRAM
	// Combiner stage 0 does the illumination modulation on the surface decal color
	//
	glCombinerInputNV(GL_COMBINER0_NV, GL_RGB, GL_VARIABLE_A_NV,
					  GL_TEXTURE0_ARB, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glCombinerInputNV(GL_COMBINER0_NV, GL_RGB, GL_VARIABLE_B_NV,
					  GL_TEXTURE3_ARB, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glCombinerInputNV(GL_COMBINER0_NV, GL_RGB, GL_VARIABLE_C_NV,
					  GL_TEXTURE0_ARB, GL_UNSIGNED_IDENTITY_NV, GL_ALPHA);
	glCombinerInputNV(GL_COMBINER0_NV, GL_RGB, GL_VARIABLE_D_NV,
					  GL_TEXTURE3_ARB, GL_UNSIGNED_IDENTITY_NV, GL_ALPHA);
	glCombinerOutputNV(GL_COMBINER0_NV, GL_RGB,
					   GL_DISCARD_NV, GL_DISCARD_NV, GL_SPARE1_NV,
					   GL_NONE, GL_NONE, GL_FALSE, GL_FALSE, GL_FALSE);

	// Combiner stage 1, modulate the surface color by the light color
	//
	glCombinerInputNV(GL_COMBINER1_NV, GL_RGB, GL_VARIABLE_A_NV,
					  GL_SPARE1_NV, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glCombinerInputNV(GL_COMBINER1_NV, GL_RGB, GL_VARIABLE_B_NV,
					  GL_CONSTANT_COLOR1_NV, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glCombinerOutputNV(GL_COMBINER1_NV, GL_RGB,
					   GL_DISCARD_NV, GL_DISCARD_NV, GL_SPARE1_NV,
					   GL_NONE, GL_NONE, GL_FALSE, GL_FALSE, GL_FALSE);
#else
	// Simplified register combiners to help debugging vertex program.
	glCombinerInputNV(GL_COMBINER0_NV, GL_RGB, GL_VARIABLE_A_NV,
					  GL_PRIMARY_COLOR_NV, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glCombinerInputNV(GL_COMBINER0_NV, GL_RGB, GL_VARIABLE_B_NV,
					  GL_CONSTANT_COLOR0_NV, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glCombinerInputNV(GL_COMBINER0_NV, GL_RGB, GL_VARIABLE_C_NV,
					  GL_TEXTURE0_ARB, GL_UNSIGNED_IDENTITY_NV, GL_ALPHA);
	glCombinerInputNV(GL_COMBINER0_NV, GL_RGB, GL_VARIABLE_D_NV,
					  GL_TEXTURE3_ARB, GL_UNSIGNED_IDENTITY_NV, GL_ALPHA);
	glCombinerOutputNV(GL_COMBINER0_NV, GL_RGB,
					   GL_SPARE1_NV, GL_DISCARD_NV, GL_DISCARD_NV,
					   GL_NONE, GL_NONE, GL_FALSE, GL_FALSE, GL_FALSE);
#endif // DEBUGGING_VERTEX_PROGRAM

	// The final Combiner just pass through
	//
	glFinalCombinerInputNV(GL_VARIABLE_A_NV, GL_ZERO, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glFinalCombinerInputNV(GL_VARIABLE_B_NV, GL_ZERO, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glFinalCombinerInputNV(GL_VARIABLE_C_NV, GL_ZERO, GL_UNSIGNED_IDENTITY_NV, GL_RGB);
	glFinalCombinerInputNV(GL_VARIABLE_D_NV, GL_SPARE1_NV, GL_UNSIGNED_IDENTITY_NV, GL_RGB);

	view.endGL();

	return MS::kSuccess;
}