Example #1
void SceneShapeUI::selectionRayToWorldSpacePoint( const MDagPath &camera, const MSelectInfo &selectInfo, float depth, MPoint &worldIntersectionPoint ) const
{
	MPoint localRayOrigin;
	MVector localRayDirection;
	selectInfo.getLocalRay( localRayOrigin, localRayDirection );

	MFnCamera fnCamera( camera.node() );
	float near = fnCamera.nearClippingPlane();
	float far = fnCamera.farClippingPlane();
	float z = -1;

	if( fnCamera.isOrtho() )
	{
		z = Imath::lerp( near, far, depth );
	}
	else
	{
		// perspective camera - depth isn't linear so linearise to get z
		float a = far / ( far - near );
		float b = far * near / ( near - far );
		z = b / ( depth - a );
	}	

	MMatrix localToCamera = selectInfo.selectPath().inclusiveMatrix() * camera.inclusiveMatrix().inverse();	
	MPoint cameraRayOrigin = localRayOrigin * localToCamera;
	MVector cameraRayDirection = localRayDirection * localToCamera;

	MPoint cameraIntersectionPoint = cameraRayOrigin + cameraRayDirection * ( -( z - near ) / cameraRayDirection.z );
	worldIntersectionPoint = cameraIntersectionPoint * camera.inclusiveMatrix();
}
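The perspective branch above inverts the usual depth-buffer mapping: with a = far / (far - near) and b = far * near / (near - far), a camera-space distance z maps to depth = a + b / z (0 at the near plane, 1 at the far plane), so solving for z gives the z = b / (depth - a) used in the code. A small self-contained check of that round trip, with hypothetical clipping values and no Maya dependencies:

#include <cassert>
#include <cmath>
#include <cstdio>

int main()
{
	const float nearClip = 0.1f;   // hypothetical near clipping plane
	const float farClip = 100.0f;  // hypothetical far clipping plane
	const float a = farClip / ( farClip - nearClip );
	const float b = farClip * nearClip / ( nearClip - farClip );

	// Forward mapping: camera-space distance -> normalised depth.
	auto toDepth = [&]( float z ) { return a + b / z; };
	// Inverse mapping, as used in selectionRayToWorldSpacePoint().
	auto toZ = [&]( float depth ) { return b / ( depth - a ); };

	printf( "depth(near) = %g, depth(far) = %g\n", toDepth( nearClip ), toDepth( farClip ) ); // prints 0 and 1
	assert( std::fabs( toZ( toDepth( 25.0f ) ) - 25.0f ) < 1e-3f ); // round trip recovers z
	return 0;
}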
Example #2
void ProxyViz::updateViewFrustum(const MDagPath & cameraPath)
{
	MMatrix cameraMat = cameraPath.inclusiveMatrix();
	AHelper::ConvertToMatrix44F(*cameraSpaceR(), cameraMat);
	MMatrix cameraInvMat = cameraPath.inclusiveMatrixInverse();
	AHelper::ConvertToMatrix44F(*cameraInvSpaceR(), cameraInvMat);
	float peye[3];
	peye[0] = cameraMat.matrix[3][0];
	peye[1] = cameraMat.matrix[3][1];
	peye[2] = cameraMat.matrix[3][2];
	setEyePosition(peye);
	
    float farClip = -20.f;
    if(numPlants() > 0) getFarClipDepth(farClip, gridBoundingBox() );
    
	MFnCamera fcam(cameraPath.node() );
	if(fcam.isOrtho() ) {
		float orthoW = fcam.orthoWidth();
		float orthoH = orthoW * fcam.aspectRatio();
		setOrthoFrustum(orthoW, orthoH, -10.f, farClip );
		
	} else {
		float hfa = fcam.horizontalFilmAperture();
		float vfa = fcam.verticalFilmAperture();
		float fl = fcam.focalLength();
	
		setFrustum(hfa, vfa, fl, -10.f, farClip );
	}
	setOverscan(fcam.overscan() );
}
void printAllInstancesUsingIterator()
{
	//
	//	Just use the MItInstancer iterator to enumerate all particles in 
	//	all instancers in the scene.
	//
	MItInstancer it;
	while( !it.isDone() )
	{
		MObject instancerNode = it.instancer();
		MDagPath instancerPath = it.instancerPath();
		MDagPath instancePath = it.path();
		MMatrix instanceMatrix = it.matrix();

		MString instancerNodeName = MFnDependencyNode(instancerNode).name();
		MString instancerPathName = instancerPath.fullPathName();
		MString instancePathName = instancePath.fullPathName();
		
		MMatrix pathMatrix = instancePath.inclusiveMatrix();
		MMatrix finalMatrixForPath = pathMatrix * instanceMatrix;
		MPoint pos = MPoint::origin * finalMatrixForPath;

		char str[512];
		sprintf( str, "Instancer node %s, instancer path %s, instancing path %s at position (%lf,%lf,%lf)",
				instancerNodeName.asChar(), instancerPathName.asChar(), instancePathName.asChar(), pos.x, pos.y, pos.z );
		MGlobal::displayInfo( MString(str) );
		it.next();
	}
}
MStatus lrutils::getAllWorldTransforms(MObject ctlObj, std::map<double, MMatrix>& ctlWorldMatrices) {
    MStatus status = MS::kFailure;
    
    MFnTransform ctlFn( ctlObj );
    MGlobal::select(ctlObj, MGlobal::kReplaceList);
    double nextKeyTime = 0;
    double prevKeyTime = -1;
    double currentTime;
    MGlobal::executeCommand("currentTime -q;",currentTime);
    MGlobal::viewFrame(1);
    while(nextKeyTime > prevKeyTime) {
        prevKeyTime = nextKeyTime;
        MGlobal::executeCommand("findKeyframe -timeSlider -which next;",nextKeyTime);
        MGlobal::viewFrame(nextKeyTime);
        MDagPath path;
        status = ctlFn.getPath(path);
        MyCheckStatus(status, "MFnDagNode.getPath() failed");
        MMatrix ctlWorldMatrix = path.inclusiveMatrix(&status);
        MyCheckStatus(status, "MDagPath.inclusiveMatrix() failed");
        ctlWorldMatrices.insert(std::pair<double, MMatrix>(nextKeyTime,ctlWorldMatrix));
    }
    MGlobal::viewFrame(currentTime);

    status = MS::kSuccess;
    return status;
}
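Callers typically walk the returned map to read back the controller's world-space translation at each keyframe. A minimal usage sketch, assuming a valid ctlObj as in the function above:

std::map<double, MMatrix> worldMatrices;
lrutils::getAllWorldTransforms( ctlObj, worldMatrices );
for ( std::map<double, MMatrix>::const_iterator it = worldMatrices.begin();
      it != worldMatrices.end(); ++it )
{
    MPoint worldPos = MPoint::origin * it->second;  // world-space translation at key it->first
    char msg[256];
    sprintf( msg, "key %g: (%g, %g, %g)", it->first, worldPos.x, worldPos.y, worldPos.z );
    MGlobal::displayInfo( msg );
}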
Example #5
MStatus
MannequinMoveManipulator::connectToDependNode(const MObject &dependNode) {
  MStatus status;
  MFnDependencyNode nodeFn(dependNode, &status);

  if (!status)
     return MS::kFailure;

  MPlug translatePlug = nodeFn.findPlug("translate", &status);

  if (!status)
     return MS::kFailure;

  int plugIndex = 0;
  status = connectPlugToValue(translatePlug, _translateIndex, plugIndex);

  if (!status)
     return MS::kFailure;

  MFnDagNode dagNodeFn(dependNode);
  MDagPath nodePath;
  dagNodeFn.getPath(nodePath);

  MTransformationMatrix m(nodePath.exclusiveMatrix());
  _parentXform = m;

  MTransformationMatrix n(nodePath.inclusiveMatrix());
  _childXform = n;

  finishAddingManips();
  return MPxManipulatorNode::connectToDependNode(dependNode);
}
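For a DAG path, exclusiveMatrix() is the accumulated transform of everything above the node (stored here as the parent transform), while inclusiveMatrix() also includes the node's own transform (the child transform). With Maya's row-vector convention the two are related by inclusive = local * exclusive, so the node's local matrix can be recovered as in this sketch, assuming the same nodePath as above:

// Sketch: recover the node's own (local) transform from the two path matrices.
MMatrix localMatrix = nodePath.inclusiveMatrix() * nodePath.exclusiveMatrixInverse();
MTransformationMatrix localXform( localMatrix );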
Example #6
MMatrix GetGlobalMMatrix(const MObject & in_Ref)
{
   MMatrix result;
   result.setToIdentity();

   MFnDagNode node(in_Ref);
   MDagPathArray paths;
   node.getAllPaths(paths);
   MDagPath path = paths[0];

   MString typeStr = in_Ref.apiTypeStr();
   if(typeStr == "kTransform")
   {
      result = path.inclusiveMatrix();
   }
   else
   {
      for(unsigned int i=0;i<node.parentCount();i++)
      {
         MObject parent = node.parent(i);
         typeStr = parent.apiTypeStr();
         if(typeStr == "kTransform")
         {
            result = GetGlobalMMatrix(parent);
            //break;
         }
      }
   }

   return result;
}
MStatus lrutils::updateAnimationKeys(MObject ctlObj, MVectorArray ctlLocation) {
    MStatus status;
    MFnTransform ctlFn(ctlObj);
    //get the controller's group object
    //save the original controller parent path
    MString ctlGroupParentPath;
    MStringArray result;
    MGlobal::executeCommand("listRelatives -p -f "+ctlFn.fullPathName()+";",result);
    ctlGroupParentPath = result[0];
    MObject ctlGroupObj;
    lrutils::getObjFromName(ctlGroupParentPath, ctlGroupObj);
    MFnTransform ctlGroupFn(ctlGroupObj);

    MyCheckStatus(status, "getMetaNodeConnection() failed");
    std::map<double, MMatrix> ctlWorldMatrices;
    status = lrutils::getAllWorldTransforms(ctlObj, ctlWorldMatrices);
    MyCheckStatus(status, "lrutils::getAllWorldTransforms() failed");

    //set the old controller group translation to the new location
    lrutils::updateControlGroupLocation(ctlObj, ctlLocation);

    //find the global transformation matrix of the controller group
    MDagPath groupPath;
    status = ctlGroupFn.getPath(groupPath);
    MyCheckStatus(status, "MFnDagNode.getPath() failed");
    MMatrix ctlGroupWorldMatrix = groupPath.inclusiveMatrix(&status);
    MyCheckStatus(status, "MDagPath.inclusiveMatrix() failed");
    
    //update the animation curves attached to the old controller
    lrutils::updateAnimCurves(ctlObj, ctlWorldMatrices, ctlGroupWorldMatrix);

    return MS::kSuccess;
}
Example #8
void ProxyViz::updateWorldSpace(const MObject & thisNode)
{
	MDagPath thisPath;
	MDagPath::getAPathTo(thisNode, thisPath);
	_worldSpace = thisPath.inclusiveMatrix();
	_worldInverseSpace = thisPath.inclusiveMatrixInverse();
}
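With both matrices cached, points move between the node's object space and world space by post-multiplying row vectors, so the point goes on the left. A minimal usage sketch, assuming the _worldSpace and _worldInverseSpace members set above:

MPoint localPt( 1.0, 2.0, 3.0 );               // a point in the node's object space
MPoint worldPt = localPt * _worldSpace;        // object space -> world space
MPoint backPt = worldPt * _worldInverseSpace;  // and back to object space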
IECore::ObjectPtr FromMayaCameraConverter::doConversion( const MDagPath &dagPath, IECore::ConstCompoundObjectPtr operands ) const
{
	MFnCamera fnCamera( dagPath );

	// convert things that are required by the IECore::Renderer specification

	CameraPtr result = new Camera;
	result->setName( IECore::convert<std::string>( fnCamera.name() ) );

	result->setTransform( new MatrixTransform( IECore::convert<Imath::M44f>( dagPath.inclusiveMatrix() ) ) );

	V2i resolution;
	if( operands->member<IntData>( "resolutionMode" )->readable()==RenderGlobals )
	{
		MCommonRenderSettingsData renderSettings;
		MRenderUtil::getCommonRenderSettings( renderSettings );
		resolution = Imath::V2i( renderSettings.width, renderSettings.height );
	}
	else
	{
		resolution = operands->member<V2iData>( "resolution" )->readable();
	}
	result->parameters()["resolution"] = new V2iData( resolution );

	Imath::V2f clippingPlanes = Imath::V2f( fnCamera.nearClippingPlane(), fnCamera.farClippingPlane() );
	result->parameters()["clippingPlanes"] = new V2fData( clippingPlanes );

	Imath::Box2d frustum;
	fnCamera.getRenderingFrustum( (float)resolution.x / (float)resolution.y, frustum.min.x, frustum.max.x, frustum.min.y, frustum.max.y );

	if( fnCamera.isOrtho() )
	{
		// orthographic
		result->parameters()["projection"] = new StringData( "orthographic" );
		result->parameters()["screenWindow"] = new Box2fData( Box2f( frustum.min, frustum.max ) );
	}
	else
	{
		// perspective
		result->parameters()["projection"] = new StringData( "perspective" );

		// derive horizontal field of view from the viewing frustum
		float fov = Math<double>::atan( frustum.max.x / clippingPlanes[0] ) * 2.0f;
		fov = radiansToDegrees( fov );
		result->parameters()["projection:fov"] = new FloatData( fov );

		// scale the frustum so that it's -1,1 in x and that gives us the screen window
		float frustumScale = 2.0f/(frustum.max.x - frustum.min.x);
		Box2f screenWindow( V2f( -1, frustum.min.y * frustumScale ), V2f( 1, frustum.max.y * frustumScale ) );
		result->parameters()["screenWindow"] = new Box2fData( screenWindow );
	}

	// and add on other bits and bobs from maya attributes as blind data
	CompoundDataPtr maya = new CompoundData;
	result->blindData()->writable()["maya"] = maya;
	maya->writable()["aperture"] = new V2fData( Imath::V2f( fnCamera.horizontalFilmAperture(), fnCamera.verticalFilmAperture() ) );

	return result;
}
Example #10
//  ========== DtCameraGetPosition ==========
//
//  SYNOPSIS
//	Return the camera position.
//
int DtCameraGetPosition( int cameraID,
						 float* xTran,
						 float* yTran,
						 float* zTran )
{
    // Check for error.
    //
	if( (cameraID < 0) || (cameraID >= local->camera_ct ) )
	{
		*xTran = 0.0;
		*yTran = 0.0;
		*zTran = 0.0;
		return( 0 );
	}

	MStatus stat = MS::kSuccess;

    MFnDagNode fnDagNode( local->cameras[cameraID].transformNode, &stat );
    MDagPath aPath;
    stat = fnDagNode.getPath( aPath );
	
    // Get the global transformation matrix of the camera node.
    //
    MMatrix globalMatrix = aPath.inclusiveMatrix();

	MFnCamera fnCamera( local->cameras[cameraID].cameraShapeNode, &stat );

	MPoint eyePt;
    double eyePos[4]; 

	if( MS::kSuccess == stat )
	{
	    eyePt = fnCamera.eyePoint( MSpace::kObject, &stat );

		if( DtExt_Debug() & DEBUG_CAMERA )
		{
			cerr << "eye point is at " 
				 << eyePt.x << " " << eyePt.y << " " << eyePt.z << endl;
		}

	    eyePt *= globalMatrix;
	    eyePt.get( eyePos );

	}
	else
	{
		DtExt_Err( "Error in getting the camera function set\n" );
	}

    // Return values:
    //
	*xTran = eyePos[0];
	*yTran = eyePos[1];
	*zTran = eyePos[2];

	return( 1 );
}  // DtCameraGetPosition //
Example #11
	void TransformNode::createInstance(Core::SmartPtr<Model::IInstanceCreator> model, Core::SmartPtr<Core::SceneGraphNode> &sceneGraph)
	{
		Core::SmartPtr<Instances::Instance> instance = model->createInstance();
		MFnDagNode fnNode(node);
		MDagPath dagPath;
		fnNode.getPath(dagPath);
		MMatrix worldTransform = dagPath.inclusiveMatrix();
		Maya::set(instance->worldTransform, worldTransform);
		sceneGraph->insert(instance);
	}
MStatus PluginTestUserOperation::execute(const MHWRender::MDrawContext & drawContext)
{
	//return MStatus::kSuccess;
	M3dView view;
	if(M3dView::getM3dViewFromModelPanel(panelName, view) == MStatus::kSuccess)
	{
		// Get the current viewport and scale it relative to that
		//
		int targetW, targetH;
		drawContext.getRenderTargetSize(targetW, targetH);

		// Some user drawing of scene bounding boxes
		//
		MDagPath cameraPath;
		MFnCamera fnCamera;
		view.getCamera(cameraPath);
		MMatrix m3dViewProjection, m3dViewModelView;
		view.projectionMatrix(m3dViewProjection);
		view.modelViewMatrix(m3dViewModelView);
		MFloatMatrix m3dFloatViewProjection(m3dViewProjection.matrix);
		MFloatMatrix m3dFloatViewModelView(m3dViewModelView.matrix);
		MFloatMatrix viewProjection = m3dFloatViewModelView * m3dFloatViewProjection;
		SurfaceDrawTraversal traversal;
		traversal.enableFiltering(true);
		traversal.setFrustum(cameraPath, targetW, targetH);
		traversal.traverse();
		unsigned int numItems = traversal.numberOfItems();
		MFnMesh fnMesh;
		for (int i = 0; i < numItems; i++)
		{
			MDagPath path;
			traversal.itemPath(i, path);
			if (path.hasFn(MFn::kMesh))
			{
				fnMesh.setObject(path);
				MFloatMatrix modelWorld(path.inclusiveMatrix().matrix);
				MTransformationMatrix transformMatrix;
				MFloatMatrix modelViewProjection = modelWorld * viewProjection;
				modelViewProjection = modelViewProjection.transpose();
				MIntArray triangleCounts;
				MIntArray triangleVertices; // Index list for all triangles in the mesh in one big list, i.e. the first 3 entries are for triangle 1, etc. Indexes into getPoints()
				fnMesh.getTriangles(triangleCounts, triangleVertices);
				//int indices[100];
				//triangleVertices.get(indices);
				MFloatPointArray vertexArray;
				//float points[1000][4];
				fnMesh.getPoints(vertexArray);
				//vertexArray.get(points);
				UserSceneRenderer::get()->render(triangleVertices, vertexArray, modelViewProjection);
			}
		}
	}
	return MStatus::kSuccess;
}
// Load the textures, update the necessary variable values, initialize register combiners,
// save and load the matrices with the proper values
//
MStatus	hwRefractReflectShader_NV20::preDraw(const MDrawRequest& request, M3dView& view)
{
	MStatus stat = loadTextures( request, view);

	if( MS::kSuccess != stat )		return stat;

	// get the reflectivity value
	//
	MPlug	tPlug(thisMObject(), reflectivity);
	if( tPlug.getValue( fReflectivity ) )
	{
		if( fReflectivity < 0.01f )	fReflectivity = 0.01f;
		if( fReflectivity > 1.0f )	fReflectivity = 1.0f;
	}
	else	fReflectivity = 0.5f;
	
	// get the refraction index value
	//
	MPlug	rPlug(thisMObject(), refractionIndex);
	if( rPlug.getValue( fRefractionIndex ) )
	{
		if ( fRefractionIndex < 1.0f )	fRefractionIndex = 1.0f;
		if ( fRefractionIndex > 2.0f )	fRefractionIndex = 2.0f;
	}
	else	fRefractionIndex = 1.0f;
	
	initCombiners( request, view );
	
	// Compute the camera rotation angle and axis
	//
	MDagPath	cameraPath;
	MStatus		status = view.getCamera( cameraPath );
	MMatrix		mmatrix = cameraPath.inclusiveMatrix( &status );
	MTransformationMatrix tmatrix( mmatrix );
	
	MQuaternion camRotation = tmatrix.rotation();
	MVector		camAxis;
	double		camTheta;
	camRotation.getAxisAngle(  camAxis, camTheta );
	
	// Convert to degrees from radians
	camTheta *= 57.295779513082320876798154814105;	// == (180 / M_PI)
	
	view.beginGL();
		glMatrixMode( GL_TEXTURE );
		glPushMatrix();
		glLoadIdentity();
		glScalef(1.0, -1.0, 1.0);
		glRotated( camTheta, camAxis[0], camAxis[1], camAxis[2]);
		glMatrixMode( GL_MODELVIEW );
	view.endGL();

	return stat;
}
Example #14
void
UsdMayaGLBatchRenderer::ShapeRenderer::PrepareForQueue(
    const MDagPath &objPath,
    UsdTimeCode time,
    uint8_t refineLevel,
    bool showGuides,
    bool showRenderGuides,
    bool tint,
    GfVec4f tintColor )
{
    // Initialization of default parameters goes here. These parameters get used
    // in all viewports and for selection.
    //
    _baseParams.frame = time;
    _baseParams.refineLevel = refineLevel;

    // XXX Not yet adding ability to turn off display of proxy geometry, but
    // we should at some point, as in usdview
    _baseParams.renderTags.clear();
    _baseParams.renderTags.push_back(HdTokens->geometry);
    _baseParams.renderTags.push_back(HdTokens->proxy);
    if (showGuides) {
        _baseParams.renderTags.push_back(HdTokens->guide);
    } 
    if (showRenderGuides) {
        _baseParams.renderTags.push_back(_tokens->render);
    }
    
    if( tint )
        _baseParams.overrideColor = tintColor;

    if( _delegate )
    {
        MStatus status;
        MMatrix transform = objPath.inclusiveMatrix( &status );
        if( status )
        {
            _rootXform = GfMatrix4d( transform.matrix );
            _delegate->SetRootTransform(_rootXform);
        }

        _delegate->SetRefineLevelFallback(refineLevel);

        // Will only react if time actually changes.
        _delegate->SetTime(time);

        _delegate->SetRootCompensation(_rootPrim.GetPath());

        if( !_isPopulated )
            _batchRenderer->_populateQueue.insert(this);
    }
}
Example #15
//  ========== DtCameraGetInterest ==========
//
//  SYNOPSIS
//  Return the camera's center of interest position.
//
int DtCameraGetInterest(int cameraID,float* xTran,float* yTran,float* zTran)
{
	// check for error
	//

	if ( (cameraID < 0) || (cameraID >= local->camera_ct) )
	{
		*xTran = 0.0;
		*yTran = 0.0;
		*zTran = 0.0;
		return(0);
	}
 
	// return values
	//
	
    MStatus stat = MS::kSuccess;

    MFnDagNode fnDagNode( local->cameras[cameraID].transformNode, &stat );
    MDagPath aPath;
    stat = fnDagNode.getPath( aPath );

    // Get the global transformation matrix of the camera node.
    //
    MMatrix globalMatrix = aPath.inclusiveMatrix();

    MFnCamera fnCamera( local->cameras[cameraID].cameraShapeNode, &stat );

    // Center of interest point: view node point.
    //
    MPoint coiPoint = fnCamera.centerOfInterestPoint( MSpace::kObject, &stat );
    double coiPos[4]; 
    if( DtExt_Debug() & DEBUG_CAMERA )
    {
        coiPoint.get( coiPos );
        cerr << "local center of interest( view point position ) is " <<
            coiPos[0] << " " << coiPos[1] << " " << coiPos[2] << " "  
             << coiPos[3] << endl;
    }        
    coiPoint *= globalMatrix;
    coiPoint.get( coiPos );

	*xTran = coiPos[0];
	*yTran = coiPos[1];
	*zTran = coiPos[2];
 
	return(1);

}  // DtCameraGetInterest //
/*
	Some simple code to draw a wireframe bounding box
	in OpenGL
*/
void
MCustomSceneDraw::drawBounds( const MDagPath &dagPath,
							  const MBoundingBox &box)
{
	MMatrix  matrix = dagPath.inclusiveMatrix();
	MPoint	minPt = box.min();
	MPoint	maxPt = box.max();

	double bottomLeftFront[3] =		{ minPt.x, minPt.y, minPt.z };
	double topLeftFront[3] = 		{ minPt.x, maxPt.y, minPt.z };
	double bottomRightFront[3] =	{ maxPt.x, minPt.y, minPt.z };
	double topRightFront[3] =		{ maxPt.x, maxPt.y, minPt.z };
	double bottomLeftBack[3] =		{ minPt.x, minPt.y, maxPt.z };
	double topLeftBack[3] =			{ minPt.x, maxPt.y, maxPt.z };
	double bottomRightBack[3] =		{ maxPt.x, minPt.y, maxPt.z };
	double topRightBack[3] =		{ maxPt.x, maxPt.y, maxPt.z };

	gGLFT->glMatrixMode( MGL_MODELVIEW );
	gGLFT->glPushMatrix();
	gGLFT->glMultMatrixd( &(matrix.matrix[0][0]) );

	gGLFT->glBegin(MGL_LINE_STRIP);
	gGLFT->glVertex3dv( bottomLeftFront );
	gGLFT->glVertex3dv( bottomLeftBack );
	gGLFT->glVertex3dv( topLeftBack );
	gGLFT->glVertex3dv( topLeftFront );
	gGLFT->glVertex3dv( bottomLeftFront );
	gGLFT->glVertex3dv( bottomRightFront );
	gGLFT->glVertex3dv( bottomRightBack);
	gGLFT->glVertex3dv( topRightBack );
	gGLFT->glVertex3dv( topRightFront );
	gGLFT->glVertex3dv( bottomRightFront );
	gGLFT->glEnd();

	gGLFT->glBegin(MGL_LINES);
	gGLFT->glVertex3dv(bottomLeftBack);
	gGLFT->glVertex3dv(bottomRightBack);

	gGLFT->glVertex3dv(topLeftBack);
	gGLFT->glVertex3dv(topRightBack);

	gGLFT->glVertex3dv(topLeftFront);
	gGLFT->glVertex3dv(topRightFront);
	gGLFT->glEnd();

	gGLFT->glPopMatrix();
}
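A typical call site passes the path of the object being drawn together with its object-space bounding box; the inclusive matrix pushed onto the modelview stack above then places the box correctly in the scene. A minimal usage sketch, where dagNodeFn is a hypothetical MFnDagNode wrapping that object and the call is made from within MCustomSceneDraw:

MDagPath itemPath;
dagNodeFn.getPath( itemPath );                    // DAG path of the object being drawn
drawBounds( itemPath, dagNodeFn.boundingBox() );  // bounding box is in the object's local space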
Example #17
MStatus V3Manipulator::connectToDependNode( const MObject & node )
{	
	MFnDagNode dagFn( node );
	MDagPath nodePath;
	dagFn.getPath( nodePath );
	
	MStatus status;
	m_translatePlug = dagFn.findPlug( m_plug.partialName(), &status );
	if( !status )
	{
		return MStatus::kFailure;
	}

	MFnFreePointTriadManip translateFn( m_translateManip );
	translateFn.connectToPointPlug( m_translatePlug );

	addManipToPlugConversionCallback( m_translatePlug, (manipToPlugConversionCallback)&V3Manipulator::vectorManipToPlugConversion );
	addPlugToManipConversionCallback( translateFn.pointIndex(), (plugToManipConversionCallback)&V3Manipulator::vectorPlugToManipConversion );
	
	MStatus stat = finishAddingManips();
	if( stat == MStatus::kFailure )
	{
		return MStatus::kFailure;
	}
	
	MPxManipContainer::connectToDependNode( node );

	readParameterOptions( dagFn );

	if( m_worldSpace)
	{
		m_localMatrix.setToIdentity();
		m_localMatrixInv.setToIdentity();
	}
	else
	{
		// Inherit any transform to the parent
		MDagPath transformPath = nodePath;
		transformPath.pop(); 
		MFnTransform transformFn( transformPath );
		m_localMatrix = transformPath.inclusiveMatrix();
		m_localMatrixInv = transformPath.inclusiveMatrixInverse();
	}
	return stat;
}
/** Create a RIB compatible representation of a Maya polygon mesh.
 */
liqRibMeshData::liqRibMeshData( MObject mesh )
: numFaces( 0 ),
  numPoints ( 0 ),
  numNormals ( 0 ),
  nverts(),
  verts(),
  vertexParam(NULL),
  normalParam(NULL)
{
	CM_TRACE_FUNC("liqRibMeshData::liqRibMeshData("<<MFnDagNode(mesh).fullPathName().asChar()<<")");

	unsigned int i;
	unsigned int j;
  areaLight = false;
  LIQDEBUGPRINTF( "-> creating mesh\n" );
  MFnMesh fnMesh( mesh );
  objDagPath = fnMesh.dagPath();
  MStatus astatus;
  
  name = fnMesh.name();
  areaLight =( liquidGetPlugValue( fnMesh, "areaIntensity", areaIntensity, astatus ) == MS::kSuccess )? true : false ; 

  if ( areaLight ) 
  {
    MDagPath meshDagPath;
    meshDagPath = fnMesh.dagPath();
    MTransformationMatrix worldMatrix = meshDagPath.inclusiveMatrix();
    MMatrix worldMatrixM = worldMatrix.asMatrix();
    worldMatrixM.get( transformationMatrix );
  }

  numPoints = fnMesh.numVertices();
  numNormals = fnMesh.numNormals();

  // UV sets -------------------
  //
  //const unsigned numSTs( fnMesh.numUVs() );
  const unsigned numUVSets( fnMesh.numUVSets() );
  MString currentUVSetName;
  MStringArray extraUVSetNames;
  fnMesh.getCurrentUVSetName( currentUVSetName );
  {
    MStringArray UVSetNames;
    fnMesh.getUVSetNames( UVSetNames );

    for ( unsigned i( 0 ); i<numUVSets; i++ ) 
      if ( UVSetNames[i] != currentUVSetName ) 
        extraUVSetNames.append( UVSetNames[i] );
  }

  numFaces = fnMesh.numPolygons();
  const unsigned numFaceVertices( fnMesh.numFaceVertices() );

	if ( numPoints < 1 )
	{
//		MGlobal::displayInfo( MString( "fnMesh: " ) + fnMesh.name() );
//		cerr << "Liquid : Could not export degenerate mesh '"<< fnMesh.fullPathName( &astatus ).asChar() << "'" << endl << flush;
		return;
	}

  unsigned face = 0;
  unsigned faceVertex = 0;
  unsigned count;
  unsigned vertex;
  unsigned normal;
  float S;
  float T;
  MPoint point;
  liqTokenPointer pointsPointerPair;
  liqTokenPointer normalsPointerPair;
  liqTokenPointer pFaceVertexSPointer;
  liqTokenPointer pFaceVertexTPointer;

  // Allocate memory and tokens
  numFaces = numFaces;
  nverts = shared_array< liqInt >( new liqInt[ numFaces ] );
  verts = shared_array< liqInt >( new liqInt[ numFaceVertices ] );

  pointsPointerPair.set( "P", rPoint, numPoints );
  pointsPointerPair.setDetailType( rVertex );

  if ( numNormals == numPoints ) 
  {
    normalsPointerPair.set( "N", rNormal, numPoints );
    normalsPointerPair.setDetailType( rVertex );
  } 
  else 
  {
    normalsPointerPair.set( "N", rNormal, numFaceVertices );
    normalsPointerPair.setDetailType( rFaceVarying );
  }
  	
  // uv
  std::vector<liqTokenPointer> UVSetsArray;
  UVSetsArray.reserve( 1 + extraUVSetNames.length() );

  liqTokenPointer currentUVSetUPtr;
  liqTokenPointer currentUVSetVPtr;
  liqTokenPointer currentUVSetNamePtr;
  liqTokenPointer extraUVSetsUPtr;
  liqTokenPointer extraUVSetsVPtr;
  liqTokenPointer extraUVSetsNamePtr;
  if(liqglo.liqglo_outputMeshAsRMSArrays)
  {
	  currentUVSetUPtr.set( "s", rFloat, numFaceVertices );
	  currentUVSetUPtr.setDetailType( rFaceVarying );

	  currentUVSetVPtr.set( "t", rFloat, numFaceVertices );
	  currentUVSetVPtr.setDetailType( rFaceVarying );

	  currentUVSetNamePtr.set( "currentUVSet", rString, 1 );
	  currentUVSetNamePtr.setDetailType( rConstant );

	  if( numUVSets > 1 )
	  {
		  extraUVSetsUPtr.set( "u_uvSet", rFloat, numFaceVertices, numUVSets-1 );
		  extraUVSetsUPtr.setDetailType( rFaceVarying );

		  extraUVSetsVPtr.set( "v_uvSet", rFloat, numFaceVertices, numUVSets-1 );
		  extraUVSetsVPtr.setDetailType( rFaceVarying );

		  extraUVSetsNamePtr.set( "extraUVSets", rString, numUVSets-1 );
		  extraUVSetsNamePtr.setDetailType( rConstant );
	  }
  }
  else
  {
	  if ( numUVSets > 0 ) 
	  {
		liqTokenPointer pFaceVertexPointerPair;

		pFaceVertexPointerPair.set( "st", rFloat, numFaceVertices, 2 );
		pFaceVertexPointerPair.setDetailType( rFaceVarying );

		UVSetsArray.push_back( pFaceVertexPointerPair );

		for ( unsigned j( 0 ); j<extraUVSetNames.length(); j++) 
		{
		  liqTokenPointer pFaceVertexPointerPair;

		  pFaceVertexPointerPair.set( extraUVSetNames[j].asChar(), rFloat, numFaceVertices, 2 );
		  pFaceVertexPointerPair.setDetailType( rFaceVarying );

		  UVSetsArray.push_back( pFaceVertexPointerPair );
		}

		if( liqglo.liqglo_outputMeshUVs ) 
		{
		  // Match MTOR, which also outputs face-varying STs as well for some reason - Paul
		  // not anymore - Philippe
		  pFaceVertexSPointer.set( "u", rFloat, numFaceVertices );
		  pFaceVertexSPointer.setDetailType( rFaceVarying );

		  pFaceVertexTPointer.set( "v", rFloat, numFaceVertices );
		  pFaceVertexTPointer.setDetailType( rFaceVarying );
		}
	  }
  }

  vertexParam = pointsPointerPair.getTokenFloatArray();
  normalParam = normalsPointerPair.getTokenFloatArray();

  // Read the mesh from Maya
  MFloatVectorArray normals;
  fnMesh.getNormals( normals );

  for ( MItMeshPolygon polyIt ( mesh ); polyIt.isDone() == false; polyIt.next() ) 
  {
    count = polyIt.polygonVertexCount();
    nverts[face] = count;
	for( i=0; i<count; i++ )    // loop over the vertices of the face
    {
      vertex = polyIt.vertexIndex( i );
      verts[faceVertex] = vertex;
      point = polyIt.point( i, MSpace::kObject );
      pointsPointerPair.setTokenFloat( vertex, point.x, point.y, point.z );
      normal = polyIt.normalIndex( i );

      if( numNormals == numPoints ) 
        normalsPointerPair.setTokenFloat( vertex, normals[normal].x, normals[normal].y, normals[normal].z );
      else 
        normalsPointerPair.setTokenFloat( faceVertex, normals[normal].x, normals[normal].y, normals[normal].z );

	  if( liqglo.liqglo_outputMeshAsRMSArrays )
	  {
		  for( j=0; j<numUVSets; j++ )
		  {
			  if(j==0)
			  {
				  MString uvSetName = currentUVSetName;
				  // set uvSet name
				  currentUVSetNamePtr.setTokenString( 0, currentUVSetName.asChar() );
				  // set uv values
				  fnMesh.getPolygonUV( face, i, S, T, &uvSetName );

				  currentUVSetUPtr.setTokenFloat( faceVertex, S );
				  currentUVSetVPtr.setTokenFloat( faceVertex, 1-T );
			  }
			  else
			  {
				  MString uvSetName = extraUVSetNames[j-1];
				  // set uvSet name
				  extraUVSetsNamePtr.setTokenString( j-1, extraUVSetNames[j-1].asChar() );
				  // set uv values
				  fnMesh.getPolygonUV( face, i, S, T, &uvSetName );
				  extraUVSetsUPtr.setTokenFloat( (numFaceVertices*(j-1)) + faceVertex, S );
				  extraUVSetsVPtr.setTokenFloat( (numFaceVertices*(j-1)) + faceVertex, 1-T );
			  }
		  }
	  }
	  else
	  {
		  if ( numUVSets ) 
		  {
			  for( j=0; j<numUVSets; j++ )
			  {
				  MString uvSetName;
				  if(j==0)
				  {
					  uvSetName = currentUVSetName;
				  }
				  else
				  {
					  uvSetName = extraUVSetNames[j-1];
				  }
				  fnMesh.getPolygonUV( face, i, S, T, &uvSetName );
				  UVSetsArray[j].setTokenFloat( faceVertex, 0, S );
				  UVSetsArray[j].setTokenFloat( faceVertex, 1, 1-T );
				  //printf("V%d  %s : %f %f  =>  %f %f \n", i, uvSetName.asChar(), S, T, S, 1-T);

				  if( liqglo.liqglo_outputMeshUVs && j==0)
				  {
					  // Match MTOR, which always outputs face-varying STs as well for some reason - Paul
					  pFaceVertexSPointer.setTokenFloat( faceVertex, S );
					  pFaceVertexTPointer.setTokenFloat( faceVertex, 1-T );
				  }
			  }
		  }
		}
      // printf( "[%d] faceVertex = %d  vertex = %d\n", i, faceVertex, vertex );

      ++faceVertex;
    }
    ++face;
  }
  // Add tokens to array and clean up after
  tokenPointerArray.push_back( pointsPointerPair );
  tokenPointerArray.push_back( normalsPointerPair );

  if(liqglo.liqglo_outputMeshAsRMSArrays)
  {
	  tokenPointerArray.push_back( currentUVSetNamePtr );
	  tokenPointerArray.push_back( currentUVSetUPtr );
	  tokenPointerArray.push_back( currentUVSetVPtr );
	  if( numUVSets > 1 )
	  {
		  tokenPointerArray.push_back( extraUVSetsNamePtr );
		  tokenPointerArray.push_back( extraUVSetsUPtr );
		  tokenPointerArray.push_back( extraUVSetsVPtr );
	  }
  }
  else
  {
	  if( UVSetsArray.size() ) 
		  tokenPointerArray.insert( tokenPointerArray.end(), UVSetsArray.begin(), UVSetsArray.end() );

	  if( liqglo.liqglo_outputMeshUVs ) 
	  {
		  tokenPointerArray.push_back( pFaceVertexSPointer );
		  tokenPointerArray.push_back( pFaceVertexTPointer );
	  }
  }

  addAdditionalSurfaceParameters( mesh );
}
Example #19
/** Create a RIB compatible representation of a Maya pfxToon node as RiCurves.
 */
liqRibPfxToonData::liqRibPfxToonData( MObject pfxToon )
  : nverts(),
    CVs(),
    curveWidth(),
    cvColor(),
    cvOpacity()
{
  LIQDEBUGPRINTF( "-> creating pfxToon curves\n" );
  MStatus status( MS::kSuccess );

  // Update the pfxToon node with the renderCamera's position
  // otherwise the resulting outline might be incorrect
  MDagPath cameraPath;
  MSelectionList camList;
  camList.add( liqglo_renderCamera );
  camList.getDagPath( 0, cameraPath );
  MMatrix cam_mat( cameraPath.inclusiveMatrix() );
  MFnDependencyNode pfxToonNode( pfxToon );
  pfxToonNode.findPlug( "cameraPointX" ).setValue( cam_mat( 3, 0 ) );
  pfxToonNode.findPlug( "cameraPointY" ).setValue( cam_mat( 3, 1 ) );
  pfxToonNode.findPlug( "cameraPointZ" ).setValue( cam_mat( 3, 2 ) );


  MFnPfxGeometry pfxtoon( pfxToon, &status );

  if ( status == MS::kSuccess ) 
	{
    MRenderLineArray profileArray;
    MRenderLineArray creaseArray;
    MRenderLineArray intersectionArray;

    bool doLines          = true;
    bool doTwist          = false;
    bool doWidth          = true;
    bool doFlatness       = false;
    bool doParameter      = false;
    bool doColor          = true;
    bool doIncandescence  = false;
    bool doTransparency   = true;
    bool doWorldSpace     = false;

    status = pfxtoon.getLineData( profileArray, creaseArray, intersectionArray, doLines, doTwist, doWidth, doFlatness, doParameter, doColor, doIncandescence, doTransparency, doWorldSpace );

    if ( status == MS::kSuccess ) 
    {
      // Get the lines and fill the arrays.
      ncurves = profileArray.length();
      {
        MFnDependencyNode pfxNode( pfxToon );
        MString info( "[liquid] pfxToon node " );
        info += pfxNode.name() + " : " + ncurves + " curves.";
        cout << info << endl << flush;
      }

      unsigned totalNumberOfVertices( 0 );

      if ( ncurves > 0 ) 
      {
        nverts = shared_array< RtInt >( new RtInt[ ncurves ] );

        // Calculate storage requirements.
        // This is a lot more efficient than all those reallocs()
        // (or resize()s if we used a vector) that were done before
        // in the main loop below.
        for ( unsigned i( 0 ); i < ncurves; i++ ) 
				{
          MRenderLine theLine( profileArray.renderLine( i, &status ) );
          if ( MS::kSuccess == status ) 
					{
            MVectorArray vertices( theLine.getLine() );
            totalNumberOfVertices += vertices.length();
          }
        }

        // Allocate memory
        CVs = shared_array< RtFloat >( new RtFloat[ totalNumberOfVertices * 3 ] );
        if ( !CVs ) 
				{
          MString err( "liqRibPfxToonData failed to allocate CV memory!" );
          cout << err << endl << flush;
          throw( err );
          return;
        }

        curveWidth = shared_array< RtFloat >( new RtFloat[ totalNumberOfVertices ] );
        if ( !curveWidth ) 
				{
          MString err( "liqRibPfxToonData failed to allocate per vertex width memory!" );
          cout << err << endl << flush;
          throw( err );
          return;
        }

        cvColor = shared_array< RtFloat >( new RtFloat[ totalNumberOfVertices * 3 ] );
        if ( !cvColor ) 
				{
          MString err( "liqRibPfxToonData failed to allocate CV color memory!" );
          cout << err << endl << flush;
          throw(err);
          return;
        }

        cvOpacity = shared_array< RtFloat >( new RtFloat[ totalNumberOfVertices * 3 ] );
        if ( !cvOpacity ) 
				{
          MString err("liqRibPfxToonData failed to allocate CV opacity memory !");
          cout << err << endl << flush;
          throw( err );
          return;
        }

        RtFloat* cvPtr;
        RtFloat* widthPtr;
        RtFloat* colorPtr;
        RtFloat* opacityPtr;

        totalNumberOfVertices = 0;

        for ( unsigned i( 0 ); i < ncurves; i++ ) 
        {
          MRenderLine theLine( profileArray.renderLine( i, &status ) );

          if ( MS::kSuccess == status ) {

            const MVectorArray& vertices(           theLine.getLine() );
            const MDoubleArray& width(              theLine.getWidth() );
            const MVectorArray& vertexColor(        theLine.getColor() );
            const MVectorArray& vertexTransparency( theLine.getTransparency() );

            //cout <<"line "<<i<<" contains "<<vertices.length()<<" vertices."<<endl;
            //cout <<vertexColor<<endl;

            nverts[i] = vertices.length();
            totalNumberOfVertices += vertices.length();

            cvPtr      = CVs.get()        + ( totalNumberOfVertices * 3 - nverts[ i ] * 3 ) ;
            widthPtr   = curveWidth.get() + ( totalNumberOfVertices     - nverts[ i ] ) ;
            colorPtr   = cvColor.get()    + ( totalNumberOfVertices * 3 - nverts[ i ] * 3 ) ;
            opacityPtr = cvOpacity.get()  + ( totalNumberOfVertices * 3 - nverts[ i ] * 3 ) ;

            for ( unsigned vertIndex( 0 ); vertIndex < vertices.length(); vertIndex++ ) 
            {
              *cvPtr++      = ( RtFloat ) vertices[ vertIndex ].x;
              *cvPtr++      = ( RtFloat ) vertices[ vertIndex ].y;
              *cvPtr++      = ( RtFloat ) vertices[ vertIndex ].z;

              *widthPtr++   = ( RtFloat )width[ vertIndex ];

              *colorPtr++   = ( RtFloat )vertexColor[ vertIndex ].x ;
              *colorPtr++   = ( RtFloat )vertexColor[ vertIndex ].y ;
              *colorPtr++   = ( RtFloat )vertexColor[ vertIndex ].z ;

              *opacityPtr++ = ( RtFloat )( 1.0f - vertexTransparency[ vertIndex ].x ) ;
              *opacityPtr++ = ( RtFloat )( 1.0f - vertexTransparency[ vertIndex ].y ) ;
              *opacityPtr++ = ( RtFloat )( 1.0f - vertexTransparency[ vertIndex ].z ) ;
            }
          }
        }

        // Store for output
        liqTokenPointer points_pointerPair;
        if ( !points_pointerPair.set( "P", rPoint, true, false, totalNumberOfVertices ) ) 
				{
          MString err( "liqRibPfxToonData: liqTokenPointer failed to allocate CV memory !" );
          cout << err << endl;
          throw(err);
          return;
        }
        points_pointerPair.setDetailType( rVertex );
        points_pointerPair.setTokenFloats( CVs );
        tokenPointerArray.push_back( points_pointerPair );

        // Store width params
        liqTokenPointer width_pointerPair;
        if ( !width_pointerPair.set( "width", rFloat, true, false, totalNumberOfVertices ) ) 
				{
          MString err("liqRibPfxToonData: liqTokenPointer failed to allocate width memory !");
          cout <<err<<endl;
          throw(err);
          return;
        }
        width_pointerPair.setDetailType( rVarying );
        width_pointerPair.setTokenFloats( curveWidth );
        tokenPointerArray.push_back( width_pointerPair );

        // Store color params
        liqTokenPointer color_pointerPair;
        if ( !color_pointerPair.set( "pfxToon_vtxColor", rColor, true, false, totalNumberOfVertices ) ) 
				{
          MString err("liqRibPfxToonData: liqTokenPointer failed to allocate color memory !");
          cout <<err<<endl;
          throw(err);
          return;
        }
        color_pointerPair.setDetailType( rVertex );
        color_pointerPair.setTokenFloats( cvColor );
        tokenPointerArray.push_back( color_pointerPair );

        // Store opacity params
        liqTokenPointer opacity_pointerPair;
        if ( !opacity_pointerPair.set( "pfxToon_vtxOpacity", rColor, true, false, totalNumberOfVertices ) ) 
				{
          MString err("liqRibPfxToonData: liqTokenPointer failed to allocate opacity memory !");
          cout <<err<<endl<<flush;
          throw(err);
          return;
        }
        opacity_pointerPair.setDetailType( rVertex );
        opacity_pointerPair.setTokenFloats( cvOpacity );
        tokenPointerArray.push_back( opacity_pointerPair );

        addAdditionalSurfaceParameters( pfxToon );

      }
    }
  }
}
HRESULT CMayaManager::PerspectiveCamera_Synchronize()
{
    MDagPath MayaCamera;
    M3dView panel;
    for(UINT iView= 0; iView < M3dView::numberOf3dViews(); iView++)
    {
        D3DXMATRIX mCamera;
        M3dView::get3dView(iView, panel);
        panel.getCamera(MayaCamera);
        MayaCamera.pop();

        MString perspNameStr( "persp" );
        MString cameraNameStr = MayaCamera.partialPathName();

        cameraNameStr = cameraNameStr.substring(0, perspNameStr.length()-1 );
        const char* cameraName= cameraNameStr.asChar();

        if(cameraNameStr == perspNameStr )
        {
            MayaCamera.extendToShape();
            MFloatMatrix fView(MayaCamera.inclusiveMatrix().matrix );

            ConvertWorldMatrix(mCamera, fView);

            panel.getCamera(MayaCamera);
            MFnCamera fnMayaCamera(MayaCamera.node());

            MVector mUp= fnMayaCamera.upDirection();
            MVector mAt= fnMayaCamera.viewDirection();
            MPoint mEye= fnMayaCamera.eyePoint(MSpace::kWorld);

            D3DXVECTOR3 dxEye( (float)mEye.x, (float)mEye.y, (float)-mEye.z );
            D3DXVECTOR3 dxAt( (float)mAt.x, (float)mAt.y, (float)-mAt.z );
            D3DXVECTOR3 dxUp( (float)mUp.x, (float)mUp.y, (float)-mUp.z );
            D3DXVECTOR4 fEye;
            D3DXVECTOR4 fAt;
            D3DXVECTOR3 fUp;

            D3DXVec3Transform(&fEye, &dxEye,(D3DXMATRIX*)&mCamera);
            D3DXVec3Transform(&fAt, &dxAt,(D3DXMATRIX*)&mCamera);
            D3DXVec3TransformNormal(&fUp, &dxUp,(D3DXMATRIX*)&mCamera);


            D3DXMatrixLookAtLH(&PerspectiveCamera_View,
                               (D3DXVECTOR3*)&fEye,
                               (D3DXVECTOR3*)&fAt,
                               &fUp);

            // Projection matrix
            float zNear = (float)fnMayaCamera.nearClippingPlane();
            float zFar = (float)fnMayaCamera.farClippingPlane();
            float hFOV = (float)fnMayaCamera.horizontalFieldOfView();
            float f = (float) (1.0f / (float) tan( hFOV / 2.0f ));

            ZeroMemory( &PerspectiveCamera_Projection, sizeof(PerspectiveCamera_Projection) );
            PerspectiveCamera_Projection._11 = f;
            PerspectiveCamera_Projection._22 = f;
            PerspectiveCamera_Projection._33 = (zFar+zNear) / (zFar-zNear);
            PerspectiveCamera_Projection._34 = 1.0f;
            PerspectiveCamera_Projection._43 = -2 * (zFar*zNear)/(zFar-zNear);

            break;
        }
    }
    return S_OK;
}
Example #21
/*

	emits particles with color sampled from specified
	shading node/shading engine

*/
MStatus sampleParticles::doIt( const MArgList& args )
{
	unsigned int i;
	bool shadow = 0;
	bool reuse = 0;

	for ( i = 0; i < args.length(); i++ )
		if ( args.asString(i) == MString("-shadow") || 
			args.asString(i) == MString("-s") )
			shadow = 1;
		else if ( args.asString(i) == MString("-reuse") || 
			args.asString(i) == MString("-r") )
			reuse = 1;
		else
			break;
	if ( args.length() - i < 5 )
	{
		displayError( "Usage: sampleParticles [-shadow|-reuse] particleName <shadingEngine|shadingNode.plug> resX resY scale\n"
			"  Example: sampleParticles -shadow particle1 phong1SG 64 64 10;\n"
			"  Example: sampleParticles particle1 file1.outColor 128 128 5;\n" );
		return MS::kFailure;
	}
	if ( reuse && !shadow )	// can only reuse if shadow is turned on
		reuse = 0;

	MString particleName = args.asString( i );
	MString node = args.asString( i+1 );
	int resX = args.asInt( i+2 );
	int resY = args.asInt( i+3 );
	double scale = args.asDouble( i+4 );

	if ( scale <= 0.0 )
		scale = 1.0;

	MFloatArray uCoord, vCoord;
	MFloatPointArray points;
	MFloatVectorArray normals, tanUs, tanVs;

	if ( resX <= 0 )
		resX = 1;
	if ( resY <= 0 )
		resY = 1;

	MString command( "emit -o " );
	command += particleName;
	char tmp[2048];

	float stepU = (float) (1.0 / resX);
	float stepV = (float) (1.0 / resY);

	// stuff sample data by iterating over grid
	// Y is set to arch along the X axis

	int x, y;
	for ( y = 0; y < resY; y++ )
		for ( x = 0; x < resX; x++ )
		{
			uCoord.append( stepU * x );
			vCoord.append( stepV * y );

			float curY = (float) (sin( stepU * (x) * M_PI )*2.0);

			MFloatPoint curPt(
				(float) (stepU * x * scale),
				curY,
				(float) (stepV * y * scale ));

			MFloatPoint uPt(
				(float) (stepU * (x+1) * scale),
				(float) (sin( stepU * (x+1) * M_PI )*2.0),
				(float) (stepV * y * scale ));

			MFloatPoint vPt(
				(float) (stepU * (x) * scale),
				curY,
				(float) (stepV * (y+1) * scale ));

			MFloatVector du, dv, n;
			du = uPt-curPt;
			dv = vPt-curPt;

			n = dv^du;	// normal is based on dU x dV
			n = n.normal();
			normals.append( n );

			du.normal();
			dv.normal();
			tanUs.append( du );
			tanVs.append( dv );

			points.append( curPt );
		}

	// get current camera's world matrix

	MDagPath cameraPath;
	M3dView::active3dView().getCamera( cameraPath );
	MMatrix mat = cameraPath.inclusiveMatrix();
	MFloatMatrix cameraMat( mat.matrix );

	MFloatVectorArray colors, transps;
	if ( MS::kSuccess == MRenderUtil::sampleShadingNetwork( 
			node, 
			points.length(),
			shadow,
			reuse,

			cameraMat,

			&points,
			&uCoord,
			&vCoord,
			&normals,
			&points,
			&tanUs,
			&tanVs,
			NULL,	// don't need filterSize

			colors,
			transps ) )
	{
		fprintf( stderr, "%u points sampled...\n", points.length() );
		for ( i = 0; i < uCoord.length(); i++ )
		{
			sprintf( tmp, " -pos %g %g %g -at velocity -vv %g %g %g -at rgbPP -vv %g %g %g",
				points[i].x,
				points[i].y,
				points[i].z,

				normals[i].x,
				normals[i].y,
				normals[i].z,

				colors[i].x,
				colors[i].y,
				colors[i].z );

			command += MString( tmp );

			// execute emit command once every 512 samples
			if ( i % 512 == 0 )
			{
				fprintf( stderr, "%u...\n", i );
				MGlobal::executeCommand( command, false, false );
				command = MString( "emit -o " );
				command += particleName;
			}
		}

		if ( i % 512 )
			MGlobal::executeCommand( command, true, true );
	}
	else
	{
		displayError( node + MString(" is not a shading engine!  Specify node.attr or shading group node." ) );
	}

	return MS::kSuccess;
}
Example #22
//  ========== DtCameraGetMatrix ==========
//
//  SYNOPSIS
//	Return the camera transformation matrix. This matrix
// 	includes the camera rotation, translation, and scale
//	transforms. This function also sets the valid bits
//	for DT_CAMERA_POSITION and DT_CAMERA_ORIENTATION.
//	The matrix is in row-major order.
//
//  From PA DT:
//          Not implemented: returns a pointer to an identity matrix
//          under the OpenModel implementation.
//
//  For Maya DT:
//          This function returns the camera's global transformation matrix.
//
int DtCameraGetMatrix( int cameraID,
					   float** matrix )
{
	// static float mtx[4][4];
	static float globalMtx[4][4];
	static float localMtx[4][4]; 

    // Check for error.
    //
	if( ( cameraID < 0) || ( cameraID >= local->camera_ct ) )
	{
		*matrix = NULL;
		return( 0 );
	}

    // Set the valid flag.
    //
	// local->cameras[cameraID].valid_bits|=(DT_VALID_BIT_MASK&DT_CAMERA_MATRIX);

    // Get transformations.
    //
#if 0
	mtx[0][0]=1.0;mtx[0][1]=0.0;mtx[0][2]=0.0;mtx[0][3]=0.0;
	mtx[1][0]=0.0;mtx[1][1]=1.0;mtx[1][2]=0.0;mtx[1][3]=0.0;
	mtx[2][0]=0.0;mtx[2][1]=0.0;mtx[2][2]=1.0;mtx[2][3]=0.0;
	mtx[3][0]=0.0;mtx[3][1]=0.0;mtx[3][2]=0.0;mtx[3][3]=1.0;
#endif

	// Camera transformation matrix is set on the transform node.
	//	
	MStatus returnStatus = MS::kSuccess;
	MFnDagNode fnTransNode( local->cameras[cameraID].transformNode, &returnStatus );
	MDagPath dagPath;
	returnStatus = fnTransNode.getPath( dagPath );
	if( MS::kSuccess == returnStatus )
	{
		if( DtExt_Debug() & DEBUG_CAMERA )
		{
			cerr << "Got the dagPath\n";
			cerr << "length of the dagpath is " << dagPath.length() << endl;
		}
	}

	MFnDagNode fnDagPath( dagPath, &returnStatus );

	MMatrix localMatrix;
	MMatrix globalMatrix;

	localMatrix = fnTransNode.transformationMatrix ( &returnStatus );
	globalMatrix = dagPath.inclusiveMatrix();

	localMatrix.get( localMtx );
	globalMatrix.get( globalMtx );

	if( DtExt_Debug() & DEBUG_CAMERA )
	{
		int i = 0;
		int j = 0;

		cerr << "camera's global transformation matrix:\n";
		
		for( i = 0; i < 4; i++ )
		{
			for( j = 0; j < 4; j++ )
			{
				cerr << globalMtx[i][j] << " ";
			}
			cerr << endl;
		}	

		cerr << "camera's local transformation matrix:\n";

		for( i = 0; i < 4; i++ )
		{
			for( j = 0; j < 4; j++ )
			{
				cerr << localMtx[i][j] << " ";
			}
			cerr << endl;
		}	
	}

	// *matrix = (float*)&mtx;
	*matrix = (float*)&globalMtx;

	return(1);
}  // DtCameraGetMatrix //
Example #23
//  ========== DtCameraGetUpPosition ==========
//
//  SYNOPSIS
//  Return the camera Up position.
//
int DtCameraGetUpPosition( int cameraID,
                         float* xTran,
                         float* yTran,
                         float* zTran )
{                        
    // Check for error.  
    //
    if( (cameraID < 0) || (cameraID >= local->camera_ct ) )
    {
        *xTran = 0.0;
        *yTran = 0.0;
        *zTran = 0.0;
        return( 0 );
    }   
    
    MStatus stat = MS::kSuccess;
    
    MFnDagNode fnDagNode( local->cameras[cameraID].transformNode, &stat );
    MDagPath aPath;
    stat = fnDagNode.getPath( aPath );
    
    // Get the global transformation matrix of the camera node.
    //
    MMatrix globalMatrix = aPath.inclusiveMatrix();
    
    MFnCamera fnCamera( local->cameras[cameraID].cameraShapeNode, &stat );
    
    MPoint eyePt;
    double eyePos[4];
    double upPos[4];
    
    if( MS::kSuccess == stat )
    {
        eyePt = fnCamera.eyePoint( MSpace::kObject, &stat );
        
        eyePt *= globalMatrix;
        eyePt.get( eyePos );

	    MVector upVec = fnCamera.upDirection( MSpace::kObject, &stat );
		upVec *= globalMatrix;
		
	    if( DtExt_Debug() & DEBUG_CAMERA )
	    {
	        double up[3];
	        upVec.get( up );
	        cerr << "local up vector is " << up[0] << " " 
										<< up[1] << " " << up[2] << endl; 
    	}

    	MPoint upPoint = eyePt + upVec;
    	upPoint.get( upPos );
    	if( DtExt_Debug() & DEBUG_CAMERA )
    	{
    	    cerr << "global up point position is " 
						<< upPos[0] << " " << upPos[1]
            			<< " " << upPos[2] << " " << upPos[3] << endl;
    	}        
    	
    }   
    else
    {
        DtExt_Err( "Error in getting the camera function set\n" );
    }   

    *xTran = upPos[0];
    *yTran = upPos[1];
    *zTran = upPos[2];

	return(1);

}
void GlobalComponent::updateComponent(MDGModifier & dgMod,bool forceUpdate, bool globalPos) {
    MStatus status;
    if( !this->m_metaDataNode.isNull() ) {
        //get the rig name
        MFnDependencyNode metaDataNodeFn( m_metaDataNode );
        MString metaNodeName = metaDataNodeFn.name();
        MStringArray nameArray;
        metaNodeName.split('_', nameArray);
        MString rigName = nameArray[1];
        //get the controller name
        MString controllerName = nameArray[2];
        MString compXmlName = this->m_pCompGuide->getName();
        //update names of component objects
        if( rigName != this->m_rigName || controllerName != compXmlName ) {
            //set the metadata node name
            lrutils::stringReplaceAll(metaNodeName, rigName, this->m_rigName);
            lrutils::stringReplaceAll(metaNodeName, controllerName, this->m_pCompGuide->getName());
            metaDataNodeFn.setName(metaNodeName);
            //set controller object name
            MObject globalCtlObj;
            lrutils::getMetaNodeConnection(this->m_metaDataNode, globalCtlObj, "controller");
            MFnDependencyNode globalCtlFn( globalCtlObj );
            MString globalCtlName = globalCtlFn.name();
            lrutils::stringReplaceAll(globalCtlName, rigName, this->m_rigName);
            lrutils::stringReplaceAll(globalCtlName, controllerName, this->m_pCompGuide->getName());
            globalCtlFn.setName(globalCtlName);
            //set controller group object name
            MObject globalCtlGroupObj;
            lrutils::getMetaNodeConnection(this->m_metaDataNode, globalCtlGroupObj, "controllerGroup");
            MFnDependencyNode globalCtlGroupFn( globalCtlGroupObj );
            MString globalCtlGroupName = globalCtlGroupFn.name();
            lrutils::stringReplaceAll(globalCtlGroupName, rigName, this->m_rigName);
            lrutils::stringReplaceAll(globalCtlGroupName, controllerName, this->m_pCompGuide->getName());
            globalCtlGroupFn.setName(globalCtlGroupName);
            //set rigParentConstraint object name
            MObject rigParentConstraintObj;
            lrutils::getMetaNodeConnection(this->m_metaDataNode, rigParentConstraintObj, "rigParentConstraint");
            MFnDependencyNode rigParentConstraintFn( rigParentConstraintObj );
            MString rigParentConstraintName = rigParentConstraintFn.name();
            lrutils::stringReplaceAll(rigParentConstraintName, rigName, this->m_rigName);
            lrutils::stringReplaceAll(rigParentConstraintName, controllerName, this->m_pCompGuide->getName());
            rigParentConstraintFn.setName(rigParentConstraintName);
            //set rigScaleConstraint object name
            MObject rigScaleConstraintObj;
            lrutils::getMetaNodeConnection(this->m_metaDataNode, rigScaleConstraintObj, "rigScaleConstraint");
            MFnDependencyNode rigScaleConstraintFn( rigScaleConstraintObj );
            MString rigScaleConstraintName = rigScaleConstraintFn.name();
            lrutils::stringReplaceAll(rigScaleConstraintName, rigName, this->m_rigName);
            lrutils::stringReplaceAll(rigScaleConstraintName, controllerName, this->m_pCompGuide->getName());
            rigScaleConstraintFn.setName(rigScaleConstraintName);
            //set noTransformScaleConstraint object name
            MObject noTransformScaleConstraintObj;
            lrutils::getMetaNodeConnection(this->m_metaDataNode, noTransformScaleConstraintObj, "noTransformScaleConstraint");
            MFnDependencyNode noTransformScaleConstraintFn( noTransformScaleConstraintObj );
            MString noTransformScaleConstraintName = noTransformScaleConstraintFn.name();
            lrutils::stringReplaceAll(noTransformScaleConstraintName, rigName, this->m_rigName);
            lrutils::stringReplaceAll(noTransformScaleConstraintName, controllerName, this->m_pCompGuide->getName());
            noTransformScaleConstraintFn.setName(noTransformScaleConstraintName); 
        }
        //update component settings, if the version increment is raised
        //or force update is true
        MPlug versionPlug = metaDataNodeFn.findPlug( "version" );
        float nodeVersion; 
        versionPlug.getValue(nodeVersion);
        if( (this->m_pCompGuide->getVersion() > nodeVersion) || forceUpdate ) {
            versionPlug.setValue( this->m_pCompGuide->getVersion() );
            //make a new controller object based upon the xml settings    
            GlobalComponentGuidePtr globalGuide = boost::dynamic_pointer_cast<GlobalComponentGuide>(this->m_pCompGuide);
            MString ctlColor = globalGuide->getColor();
            MString ctlIcon = globalGuide->getIcon();

            MGlobal::executeCommand( "python(\"control = rig101().rig101WCGetByName('" + ctlIcon + "')\");" );
            MGlobal::executeCommand( "python(\"Utils.setControllerColor(control, '" + ctlColor + "')\");" );
            MCommandResult res;
            MGlobal::executeCommand( MString("python(\"control.fullPath()\");"), res );        
            MString sResult;
            res.getResult(sResult);
            MObject ctlObj;
            MStatus status = lrutils::getObjFromName(sResult, ctlObj);
            MyCheckStatus(status, "lrutils::getObjFromName() failed");
            //apply the scale of the controller location to the new shape
            MVectorArray ctlLocation = this->m_pCompGuide->getLocation(0);
            MFnTransform ctlFn( ctlObj );
            lrutils::setLocation(ctlObj, ctlLocation, MFnTransform::MFnTransform(), false, false, true);


            //get the global transforms of the controller for all keyframes and save them for later use
            MObject oldCtlObj;
            status = lrutils::getMetaNodeConnection( this->m_metaDataNode, oldCtlObj, "controller" );
            MyCheckStatus(status, "getMetaNodeConnection() failed");
            MFnTransform oldCtlFn( oldCtlObj );
            std::map<double, MMatrix> oldCtlWorldMatrices;
            if(globalPos) {
                status = lrutils::getAllWorldTransforms(oldCtlObj, oldCtlWorldMatrices);
                MyCheckStatus(status, "lrutils::getAllWorldTransforms() failed");
            }

            //get the shape node of the original controller object
            MStringArray sResults;
            MGlobal::executeCommand( "listRelatives -s -fullPath "+oldCtlFn.name()+";", sResults );
            MString oldCtlShapePath = sResults[0];
            MGlobal::executeCommand( "listRelatives -s -path "+oldCtlFn.name()+";", sResults );
            MString oldCtlShapeName = sResults[0];
            MObject oldCtlShapeObj; lrutils::getObjFromName(oldCtlShapePath, oldCtlShapeObj);
            //delete the old shape node
            MGlobal::deleteNode( oldCtlShapeObj );
            //get the new shape node
            MGlobal::executeCommand( "listRelatives -s -fullPath "+ctlFn.name()+";", sResults );
            MString ctlShapePath = sResults[0];
            MObject ctlShapeObj; lrutils::getObjFromName(ctlShapePath, ctlShapeObj);
            //instance the new shape node under the old controller node
            MString command = "parent -s -add " + ctlShapePath + " " + oldCtlFn.name() + ";";
            MGlobal::executeCommand( command );
            MFnDependencyNode ctlShapeFn( ctlShapeObj );
            ctlShapeFn.setName( oldCtlShapeName );
            //set the old controller group translation to the new location
            MObject oldCtlGroupObj;
            lrutils::getMetaNodeConnection( this->m_metaDataNode, oldCtlGroupObj, "controllerGroup" );
            MFnTransform oldCtlGroupFn( oldCtlGroupObj );
            //save the original old controller position
            MTransformationMatrix oldXForm = oldCtlGroupFn.transformation();
            lrutils::setLocation(oldCtlGroupObj, ctlLocation, oldCtlGroupFn, true, true, false);
            //compute the inverse transformation matrix of the old control group
            MTransformationMatrix oldCtlGrpXform = oldCtlGroupFn.transformation();
            MTransformationMatrix inverseXform = MTransformationMatrix(oldCtlGrpXform.asMatrixInverse());
            //set the target offset for the rigParentConstraint node
            lrutils::getMetaNodeConnection(this->m_metaDataNode, this->m_rigParentConstraint, "rigParentConstraint");
            lrutils::setParentConstraintOffset( this->m_rigParentConstraint, inverseXform );
            //delete the new controller transform
            MGlobal::deleteNode( ctlObj );
            
            //find the global transformation matrix of the controller group
            MDagPath groupPath;
            status = oldCtlGroupFn.getPath(groupPath);
            MyCheckStatus(status, "MFnDagNode.getPath() failed");
            MMatrix oldCtlGroupWorldMatrix = groupPath.inclusiveMatrix(&status);
            MyCheckStatus(status, "MDagPath.inclusiveMatrix() failed");
            if(globalPos) {
                //update the animation curves attached to the old controller
                lrutils::updateAnimCurves(oldCtlObj, oldCtlWorldMatrices, oldCtlGroupWorldMatrix);
            }
        }
    }
}
Example #25
MStatus sgBDataCmd_key::writeData( bool exportByMatrix )
{
	MStatus status;
	
	double dTime  = MAnimControl::currentTime().value();

	if( exportByMatrix )
	{
		for( unsigned int i=0; i<m_pathArrExport.length(); i++ )
		{
			m_objectKeyDatasExport[i].lengthTime++;
			m_objectKeyDatasExport[i].dArrTime.append( dTime );
			MFnDependencyNode fnNode( m_objectKeyDatasExport[i].oTargetNode );

			if( m_objectKeyDatasExport[i].numAttr == 0 )
			{
				continue;
			}
			if( m_objectKeyDatasExport[i].numAttr == 1 )
			{
				MPlug plug = fnNode.findPlug( m_objectKeyDatasExport[i].namesAttribute[0] );
				m_objectKeyDatasExport[i].dArrValuesArray.append( plug.asDouble() );
			}
			else
			{
				if( m_objectKeyDatasExport[i].numAttr == 10 )
				{
					// presumably a single extra scalar attribute stored ahead of
					// the nine transform values appended below
					MPlug plug = fnNode.findPlug( m_objectKeyDatasExport[i].namesAttribute[0] );
					m_objectKeyDatasExport[i].dArrValuesArray.append( plug.asDouble() );
				}

				MDagPath dagPath;
				MDagPath::getAPathTo( m_objectKeyDatasExport[i].oTargetNode, dagPath );

				MTransformationMatrix trMtx = dagPath.inclusiveMatrix() * dagPath.exclusiveMatrixInverse();
				MVector trans   = trMtx.translation( MSpace::kTransform );
				double rotValues[3] ={0,0,0};
				MTransformationMatrix::RotationOrder order = MTransformationMatrix::kZXY;
				trMtx.getRotation( rotValues, order, MSpace::kTransform );
				double scales[3];
				trMtx.getScale( scales, MSpace::kTransform );

				m_objectKeyDatasExport[i].dArrValuesArray.append( trans.x );
				m_objectKeyDatasExport[i].dArrValuesArray.append( trans.y );
				m_objectKeyDatasExport[i].dArrValuesArray.append( trans.z );
				m_objectKeyDatasExport[i].dArrValuesArray.append( rotValues[0] );
				m_objectKeyDatasExport[i].dArrValuesArray.append( rotValues[1] );
				m_objectKeyDatasExport[i].dArrValuesArray.append( rotValues[2] );
				m_objectKeyDatasExport[i].dArrValuesArray.append( scales[0] );
				m_objectKeyDatasExport[i].dArrValuesArray.append( scales[1] );
				m_objectKeyDatasExport[i].dArrValuesArray.append( scales[2] );
			}
		}
	}
	else
	{
		for( unsigned int i=0; i<m_pathArrExport.length(); i++ )
		{
			m_objectKeyDatasExport[i].lengthTime++;
			m_objectKeyDatasExport[i].dArrTime.append( dTime );
			MFnDependencyNode fnNode( m_objectKeyDatasExport[i].oTargetNode );

			for( unsigned int j=0; j< m_objectKeyDatasExport[i].numAttr; j++ )
			{
				MPlug plug = fnNode.findPlug( m_objectKeyDatasExport[i].namesAttribute[j] );
				m_objectKeyDatasExport[i].dArrValuesArray.append( plug.asDouble() );
			}
		}
	}

	return MS::kSuccess;
}
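The exportByMatrix branch above derives a node's local transform as inclusiveMatrix() * exclusiveMatrixInverse() and decomposes it with MTransformationMatrix. A minimal sketch of that decomposition on its own, assuming the same kZXY rotation order as the example:

// Sketch: decompose a DAG node's local transform into translate, rotate (ZXY) and scale.
#include <maya/MDagPath.h>
#include <maya/MMatrix.h>
#include <maya/MTransformationMatrix.h>
#include <maya/MVector.h>
#include <maya/MSpace.h>

static void localTRS( const MDagPath &path, MVector &t, double r[3], double s[3] )
{
	// local matrix = world matrix of the node * inverse world matrix of its parent
	MTransformationMatrix local( path.inclusiveMatrix() * path.exclusiveMatrixInverse() );
	t = local.translation( MSpace::kTransform );
	MTransformationMatrix::RotationOrder order = MTransformationMatrix::kZXY;
	local.getRotation( r, order, MSpace::kTransform );
	local.getScale( s, MSpace::kTransform );
}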
bool DX11ViewportRenderer::drawScene(const MRenderingInfo &renderInfo)
//
// Description:
//		Draw the Maya scene, using a custom traverser.
//
{
	bool useDrawTraversal = true;
	float groundPlaneColor[3] = { 0.8f, 0.8f, 0.8f };

	if (useDrawTraversal)
	{
		const MDagPath &cameraPath = renderInfo.cameraPath();
		if (cameraPath.isValid())
		{
			// You can actually keep the traverser classes around
			// if desired. Here we just create temporary traversers
			// on the fly.
			//
			MDrawTraversal *trav = new MDrawTraversal;
			if (!trav)
			{
				MGlobal::displayWarning("DX11 renderer : failed to create a traversal class !\n");
				return true;
			}

			trav->enableFiltering( false );

			const MRenderTarget &renderTarget = renderInfo.renderTarget();
			trav->setFrustum( cameraPath, renderTarget.width(), 
							  renderTarget.height() );

			if (!trav->frustumValid())
			{
				MGlobal::displayWarning("DX11 renderer : Frustum is invalid !\n");
				return true;
			}

			trav->traverse();

			unsigned int numItems = trav->numberOfItems();
			unsigned int i;
			for (i=0; i<numItems; i++)
			{
				MDagPath path;
				trav->itemPath(i, path);

				if (path.isValid())
				{
					bool drawIt = false;

					// The default traverser may have view manips showing up.
					// This is currently a known Maya bug.
					if ( path.hasFn( MFn::kViewManip ))
						continue;

					//
					// Draw surfaces (polys, nurbs, subdivs)
					//
					bool active = false;
					bool templated = false;
					if ( path.hasFn( MFn::kMesh) || 
						 path.hasFn( MFn::kNurbsSurface) || 
						 path.hasFn( MFn::kSubdiv) )
					{
						drawIt = true;
						if (trav->itemHasStatus( i, MDrawTraversal::kActiveItem ))
						{
							active = true;
						}
						if (trav->itemHasStatus( i, MDrawTraversal::kTemplateItem ))
						{
							templated = true;
						}
					}

					//
					// Draw the ground plane
					//
					else if (path.hasFn( MFn::kSketchPlane ) ||
							 path.hasFn( MFn::kGroundPlane ))
					{
						MMatrix  matrix = path.inclusiveMatrix();
						MFnDagNode dagNode(path);
						MBoundingBox box = dagNode.boundingBox();
						drawBounds( matrix, box, groundPlaneColor );
					}

					if (drawIt)
					{
						drawSurface( path, active, templated );
					}
				}
			}

			if (trav)
				delete trav;

			// Cleanup any unused resource items
			bool onlyInvalidItems = true;
			clearResources( onlyInvalidItems, false );
		}
	}
	else
	{
		// Draw some poly bounding boxes 
		//
		MItDag::TraversalType traversalType = MItDag::kDepthFirst;
		MFn::Type filter = MFn::kMesh;
		MStatus status;

		MItDag dagIterator( traversalType, filter, &status);

		for ( ; !dagIterator.isDone(); dagIterator.next() ) 
		{

			MDagPath dagPath;

			status = dagIterator.getPath(dagPath);
			if ( !status ) {
				status.perror("MItDag::getPath");
				continue;
			}

			MFnDagNode dagNode(dagPath, &status);
			if ( !status ) {
				status.perror("MFnDagNode constructor");
				continue;
			}

			MMatrix  matrix = dagPath.inclusiveMatrix();
			MBoundingBox box = dagNode.boundingBox();
			drawBounds( matrix, box, groundPlaneColor );
		}
	}
	return true;
}
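drawBounds() itself is not part of this listing; the sketch below shows, under that assumption, the geometric step such a helper needs: transforming the eight corners of the object-space bounding box by the path's inclusive matrix (the actual D3D line drawing is omitted).

// Sketch only: compute world-space corners for a bounds draw; the real
// drawBounds() in this renderer would also issue the D3D draw calls.
#include <maya/MBoundingBox.h>
#include <maya/MMatrix.h>
#include <maya/MPoint.h>

static void boundsCorners( const MMatrix &worldMatrix, const MBoundingBox &box, MPoint corners[8] )
{
	MPoint mn = box.min();
	MPoint mx = box.max();
	unsigned int i = 0;
	for( int x = 0; x < 2; x++ )
		for( int y = 0; y < 2; y++ )
			for( int z = 0; z < 2; z++ )
			{
				MPoint p( x ? mx.x : mn.x, y ? mx.y : mn.y, z ? mx.z : mn.z );
				corners[i++] = p * worldMatrix;	// object space -> world space
			}
}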
Example #27
bool ProceduralHolderUI::select( MSelectInfo &selectInfo, MSelectionList &selectionList, MPointArray &worldSpaceSelectPts ) const
{
	MStatus s;

	// early out if we're not selectable. we always allow components to be selected if we're highlighted,
	// but we don't allow ourselves to be selected as a whole unless meshes are in the selection mask.
	// it's not ideal that we act like a mesh, but it's at least consistent with the drawing mask we use.
	if( selectInfo.displayStatus() != M3dView::kHilite )
	{
		MSelectionMask meshMask( MSelectionMask::kSelectMeshes );
		if( !selectInfo.selectable( meshMask ) )
		{
			return false;
		}
	}

	// early out if we have no scene to draw
	ProceduralHolder *proceduralHolder = static_cast<ProceduralHolder *>( surfaceShape() );
	IECoreGL::ConstScenePtr scene = proceduralHolder->scene();
	if( !scene )
	{
		return false;
	}
	
	// we want to perform the selection using an IECoreGL::Selector, so we
	// can avoid the performance penalty associated with using GL_SELECT mode.
	// that means we don't really want to call view.beginSelect(), but we have to
	// call it just to get the projection matrix for our own selection, because as far
	// as I can tell, there is no other way of getting it reliably.
	
	M3dView view = selectInfo.view();
	view.beginSelect();
	Imath::M44d projectionMatrix;
	glGetDoublev( GL_PROJECTION_MATRIX, projectionMatrix.getValue() );
	view.endSelect();
	
	view.beginGL();
	
		glMatrixMode( GL_PROJECTION );
		glLoadMatrixd( projectionMatrix.getValue() );
		
		IECoreGL::Selector::Mode selectionMode = IECoreGL::Selector::IDRender;
		if( selectInfo.displayStatus() == M3dView::kHilite && !selectInfo.singleSelection() )
		{
			selectionMode = IECoreGL::Selector::OcclusionQuery;
		}
		
		std::vector<IECoreGL::HitRecord> hits;
		{
			IECoreGL::Selector selector( Imath::Box2f( Imath::V2f( 0 ), Imath::V2f( 1 ) ), selectionMode, hits );
				
			IECoreGL::State::bindBaseState();
			selector.baseState()->bind();
			scene->render( selector.baseState() );
		
			if( selectInfo.displayStatus() != M3dView::kHilite )
			{
				// we're not in component selection mode. we'd like to be able to select the procedural
				// object using the bounding box so we draw it too.
				MPlug pDrawBound( proceduralHolder->thisMObject(), ProceduralHolder::aDrawBound );
				bool drawBound = true;
				pDrawBound.getValue( drawBound );
				if( drawBound )
				{
					IECoreGL::BoxPrimitive::renderWireframe( IECore::convert<Imath::Box3f>( proceduralHolder->boundingBox() ) );
				}
			}
		}
						
	view.endGL();
	
	if( !hits.size() )
	{
		return false;
	}

	// iterate over the hits, converting them into components and also finding
	// the closest one.
	MIntArray componentIndices;
	float depthMin = std::numeric_limits<float>::max();
	int depthMinIndex = -1;
	for( int i=0, e = hits.size(); i < e; i++ )
	{		
		if( hits[i].depthMin < depthMin )
		{
			depthMin = hits[i].depthMin;
			depthMinIndex = componentIndices.length();
		}
		
		ProceduralHolder::ComponentsMap::const_iterator compIt = proceduralHolder->m_componentsMap.find( hits[i].name.value() );
		assert( compIt != proceduralHolder->m_componentsMap.end() );
		componentIndices.append( compIt->second.first );		
	}
	
	assert( depthMinIndex >= 0 );

	// figure out the world space location of the closest hit
	
	MDagPath camera;
	view.getCamera( camera );
	MFnCamera fnCamera( camera.node() );
	float near = fnCamera.nearClippingPlane();
	float far = fnCamera.farClippingPlane();
	
	float z = -1;
	if( fnCamera.isOrtho() )
	{
		z = Imath::lerp( near, far, depthMin );
	}
	else
	{
		// perspective camera - depth isn't linear so linearise to get z
		float a = far / ( far - near );
		float b = far * near / ( near - far );
		z = b / ( depthMin - a );
	}	
	
	MPoint localRayOrigin;
	MVector localRayDirection;
	selectInfo.getLocalRay( localRayOrigin, localRayDirection );
	MMatrix localToCamera = selectInfo.selectPath().inclusiveMatrix() * camera.inclusiveMatrix().inverse();	
	MPoint cameraRayOrigin = localRayOrigin * localToCamera;
	MVector cameraRayDirection = localRayDirection * localToCamera;
	
	MPoint cameraIntersectionPoint = cameraRayOrigin + cameraRayDirection * ( -( z - near ) / cameraRayDirection.z );
	MPoint worldIntersectionPoint = cameraIntersectionPoint * camera.inclusiveMatrix();
	
	// turn the processed hits into appropriate changes to the current selection
				
	if( selectInfo.displayStatus() == M3dView::kHilite )
	{
		// selecting components
		MFnSingleIndexedComponent fnComponent;
		MObject component = fnComponent.create( MFn::kMeshPolygonComponent, &s ); assert( s );
	
		if( selectInfo.singleSelection() )
		{
			fnComponent.addElement( componentIndices[depthMinIndex] );
		}
		else
		{
			fnComponent.addElements( componentIndices );
		}
		
		MSelectionList items;
		items.add( selectInfo.multiPath(), component );
		
		selectInfo.addSelection(
			items, worldIntersectionPoint,
			selectionList, worldSpaceSelectPts,
			MSelectionMask::kSelectMeshFaces,
			true
		);		
	}
	else
	{
		// selecting objects
		MSelectionList item;
		item.add( selectInfo.selectPath() );

		selectInfo.addSelection(
			item, worldIntersectionPoint,
			selectionList, worldSpaceSelectPts,
			MSelectionMask::kSelectMeshes,
			false
		);
	}
	
	return true;
}
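The perspective branch above inverts the non-linear depth mapping depth = a + b / z, with a = far / (far - near) and b = far * near / (near - far). A small helper capturing both camera cases, assuming the hit depth is normalised to [0,1] as in this example:

// Sketch: recover a camera-space distance from a normalised selection depth,
// matching the maths used in select() above.
inline float depthToCameraZ( float depth, float nearClip, float farClip, bool ortho )
{
	if( ortho )
	{
		// orthographic depth is linear between the clipping planes
		return nearClip + ( farClip - nearClip ) * depth;
	}
	// perspective: depth = a + b / z  =>  z = b / ( depth - a )
	float a = farClip / ( farClip - nearClip );
	float b = farClip * nearClip / ( nearClip - farClip );
	return b / ( depth - a );
}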
Example #28
void printInstancerUsingFunctionSet( const MDagPath& instancerPath )
{
	char str[256];
	MString pathName = instancerPath.fullPathName();
	sprintf( str, "Instancer %s:", pathName.asChar() );
	MGlobal::displayInfo( MString(str) );
		
	MFnInstancer fnInst( instancerPath );
	int numParticles = fnInst.particleCount();
	sprintf( str, "    num particles = %d", numParticles );
	MGlobal::displayInfo( MString(str) );
	
	//
	//	Step 1: Use the MFnInstancer::instancesForParticle() to enumerate
	//			the paths instanced under each particle.  
	//
	MGlobal::displayInfo( "    Using instancesForParticle()...." );
	MMatrix instancerWorldMatrix = instancerPath.inclusiveMatrix();	// computed here but not referenced in the rest of this example
	int p = 0;
	for( p = 0; p < numParticles; p++ )
	{
		MMatrix particleMatrix;
		MDagPathArray particlePaths;
		int numInstances = fnInst.instancesForParticle( p, particlePaths, particleMatrix );

		//	iterate over all the instances under this particle
		//		
		for( int i = 0; i < numInstances; i++ )
		{
			const MDagPath& instancedPath = particlePaths[i];
			MMatrix instancedPathMatrix = instancedPath.inclusiveMatrix();

			//	the final world position of the instanced shape is determined
			//	by the original path's matrix combined with the offset matrix
			//	provided by the instancer
			//
			MMatrix finalMatrixForPath = instancedPathMatrix * particleMatrix;
			MPoint finalPoint = MPoint::origin * finalMatrixForPath;
				
			MString instancedPathName = instancedPath.fullPathName();
			sprintf( str, "        Path %-50s at position (%lf,%lf,%lf)", instancedPathName.asChar(), finalPoint.x, finalPoint.y, finalPoint.z );
			MGlobal::displayInfo( str );
		}
	}
	
	//
	//	Step 2: Use the MFnInstancer::allInstances() method to enumerate all
	//			particle instances generated by this instancer.  The same
	//			information that was extracted one particle at a time in 
	//			Step 1 is now retrieved with one function call, and stored in
	//			a set of arrays.
	//
	MGlobal::displayInfo( "    Using allInstances()...." );
	MDagPathArray allPaths;
	MMatrixArray allMatrices;
	MIntArray pathIndices;
	MIntArray pathStartIndices;
	
	fnInst.allInstances( allPaths, allMatrices, pathStartIndices, pathIndices );
	for( p = 0; p < numParticles; p++ )
	{
		MMatrix particleMatrix = allMatrices[p];

		//	the number of paths instanced under a particle is computed by
		//	taking the difference between the starting path index for this
		//	particle and that of the next particle.  The size of the start
		//	index array is always one larger than the number of particles.
		//
		int numPaths = pathStartIndices[p+1]-pathStartIndices[p];
		
		//	the values pathIndices[pathStart...pathStart+numPaths] give the
		//	indices in the allPaths array of the paths instanced under this
		//	particular particle.  Remember, different paths can be instanced
		//	under each particle.
		//
		int pathStart = pathStartIndices[p];

		//	loop through the instanced paths for this particle
		//
		for( int i = pathStart; i < pathStart+numPaths; i++ )
		{
			int curPathIndex = pathIndices[i];
			const MDagPath& curPath = allPaths[curPathIndex];

			MMatrix instancedPathMatrix = curPath.inclusiveMatrix();
			MMatrix finalMatrixForPath = instancedPathMatrix * particleMatrix;
			MPoint finalPoint = MPoint::origin * finalMatrixForPath;
				
			MString instancedPathName = curPath.fullPathName();
			sprintf( str, "        Path %-50s at position (%lf,%lf,%lf)", instancedPathName.asChar(), finalPoint.x, finalPoint.y, finalPoint.z );
			MGlobal::displayInfo( str );
		}
	}
}
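A hypothetical driver for the function above, not part of the original example, which visits every instancer in the scene via MItDag:

// Sketch: call printInstancerUsingFunctionSet() for every particle instancer in the scene.
#include <maya/MItDag.h>
#include <maya/MDagPath.h>
#include <maya/MStatus.h>
#include <maya/MFn.h>

void printAllInstancersUsingFunctionSet()
{
	MStatus status;
	MItDag it( MItDag::kDepthFirst, MFn::kInstancer, &status );
	if( status != MS::kSuccess )
	{
		return;
	}
	for( ; !it.isDone(); it.next() )
	{
		MDagPath instancerPath;
		if( it.getPath( instancerPath ) == MS::kSuccess )
		{
			printInstancerUsingFunctionSet( instancerPath );
		}
	}
}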
bool DX11ViewportRenderer::drawSurface( const MDagPath &dagPath, bool active, bool templated)
{
	bool drewSurface = false;

	if ( !dagPath.hasFn( MFn::kMesh ))
	{
		MMatrix  matrix = dagPath.inclusiveMatrix();
		MFnDagNode dagNode(dagPath);
		MBoundingBox box = dagNode.boundingBox();
		float color[3] = {0.6f, 0.3f, 0.0f};
		if (active)
		{
			color[0] = 1.0f;
			color[1] = 1.0f;
			color[2] = 1.0f;
		}
		else if (templated)
		{
			color[0] = 1.0f;
			color[1] = 0.686f;
			color[2] = 0.686f;
		}
		drawBounds( matrix, box, color);
		return true;
	}

	if ( dagPath.hasFn( MFn::kMesh ))
	{
		MMatrix  matrix = dagPath.inclusiveMatrix();
		MFnDagNode dagNode(dagPath);

		// Look for any hardware shaders which can draw D3D first.
		//
		bool drewWithHwShader = false;
		{
			MFnMesh fnMesh(dagPath);
			MObjectArray sets;
			MObjectArray comps;
			unsigned int instanceNum = dagPath.instanceNumber();
			if (!fnMesh.getConnectedSetsAndMembers(instanceNum, sets, comps, true))
				MGlobal::displayError("ERROR : MFnMesh::getConnectedSetsAndMembers");
			for ( unsigned i=0; i<sets.length(); i++ ) 
			{
				MObject set = sets[i];
				MObject comp = comps[i];

				MStatus status;
				MFnSet fnSet( set, &status );
				if (status == MS::kFailure) {
					MGlobal::displayError("ERROR: MFnSet::MFnSet");
					continue;
				}

				MObject shaderNode = findShader(set);
				if (shaderNode != MObject::kNullObj)
				{
					MPxHardwareShader * hwShader = 
						MPxHardwareShader::getHardwareShaderPtr( shaderNode );

					if (hwShader)
					{
						const MRenderProfile & profile = hwShader->profile();
						if (profile.hasRenderer( MRenderProfile::kMayaD3D))
						{
							// Render a Maya D3D hw shader here....
							//printf("Found a D3D hw shader\n");
							//drewWithHwShader = true;
						}
					}
				}
			}
		}

		// Get the geometry buffers for this mesh and render them
		D3DGeometry* Geometry = m_resourceManager.getGeometry( dagPath, m_pD3DDevice);
		if( Geometry)
		{
			// Transform from object to world space
			//
			XMMATRIX objectToWorld = XMMATRIX
				(
				(float)matrix.matrix[0][0], (float)matrix.matrix[0][1], (float)matrix.matrix[0][2], (float)matrix.matrix[0][3],
				(float)matrix.matrix[1][0], (float)matrix.matrix[1][1], (float)matrix.matrix[1][2], (float)matrix.matrix[1][3],
				(float)matrix.matrix[2][0], (float)matrix.matrix[2][1], (float)matrix.matrix[2][2], (float)matrix.matrix[2][3],
				(float)matrix.matrix[3][0], (float)matrix.matrix[3][1], (float)matrix.matrix[3][2], (float)matrix.matrix[3][3]
			);

			FixedFunctionConstants cb;

			if (!drewWithHwShader)
			{
				// Get material properties for shader associated with mesh
				//
				// 1. Try to draw with the sample internal programmable shader
				bool drewGeometryWithShader = false;

				// 2. Draw with fixed function shader
				if (!drewGeometryWithShader)
				{
					// Set up a default material, just in case there is none.
					//
					float diffuse[3];
					if (active)
					{
						if (templated)
						{
							m_pD3DDeviceCtx->RSSetState( m_pWireframeRS );
							diffuse[0] = 1.0f; diffuse[1] = 0.686f; diffuse[2] = 0.686f;
						}
						else
						{
							m_pD3DDeviceCtx->RSSetState( m_pNormalRS );
							diffuse[0] = 0.6f; diffuse[1] = 0.6f; diffuse[2] = 0.6f;
						}
					}
					else
					{
						if (templated)
						{
							m_pD3DDeviceCtx->RSSetState( m_pWireframeRS );
							diffuse[0] = 1.0f; diffuse[1] = 0.686f; diffuse[2] = 0.686f;
						}
						else
						{
							m_pD3DDeviceCtx->RSSetState( m_pNormalRS );
							diffuse[0] = 0.5f; diffuse[1] = 0.5f; diffuse[2] = 0.5f;
						}
					}

					// Set constant buffer
					XMVECTOR det;
					cb.wvIT = XMMatrixInverse( &det, objectToWorld * m_currentViewMatrix );
					cb.wvp = XMMatrixTranspose( objectToWorld * m_currentViewMatrix * m_currentProjectionMatrix );
					cb.wv = XMMatrixTranspose( objectToWorld * m_currentViewMatrix );
					cb.lightDir = XMFLOAT4( 0.0f, 0.0f, 1.0f, 0.0f );
					cb.lightColor = XMFLOAT4( 1.0f, 1.0f, 1.0f, 0.0f );
					cb.ambientLight = XMFLOAT4( 0.2f, 0.2f, 0.2f, 0.0f );
					cb.diffuseMaterial = XMFLOAT4( diffuse[0], diffuse[1], diffuse[2], 0.0f );
					cb.specularColor = XMFLOAT4( 0.2f, 0.2f, 0.2f, 0.0f );
					cb.diffuseCoeff = 1.0f;
					cb.shininess = 16.0f;
					cb.transparency = 1.0f;
					m_pD3DDeviceCtx->UpdateSubresource( m_pFixedFunctionConstantBuffer, 0, NULL, &cb, 0, 0 );

					// get shader
					SurfaceEffectItemList::const_iterator it = m_resourceManager.getSurfaceEffectItemList().find( "Maya_fixedFunction" );
					if ( it == m_resourceManager.getSurfaceEffectItemList().end() )
						return false;
					const SurfaceEffectItem* sei = it->second;

					// bind shaders
					m_pD3DDeviceCtx->VSSetShader( sei->fVertexShader, NULL, 0 );
					m_pD3DDeviceCtx->VSSetConstantBuffers( 0, 1, &m_pFixedFunctionConstantBuffer );
					m_pD3DDeviceCtx->IASetInputLayout( sei->fInputLayout );
					m_pD3DDeviceCtx->PSSetShader( sei->fPixelShader, NULL, 0 );
					m_pD3DDeviceCtx->PSSetConstantBuffers( 0, 1, &m_pFixedFunctionConstantBuffer );

					Geometry->Render( m_pD3DDeviceCtx );

					drewSurface = true;
				}
			}

			// Draw wireframe on top
			//

			if ( drewSurface && active )
			{
				bool drawActiveWithBounds = false;
				if (drawActiveWithBounds)
				{
					MBoundingBox box = dagNode.boundingBox();
					float color[3] = {1.0f, 1.0f, 1.0f};
					drawBounds( matrix, box, color );
				}
				else
				{
					cb.lightColor = XMFLOAT4( 0.0f, 0.0f, 0.0f, 0.0f );
					cb.ambientLight = XMFLOAT4( 1.0f, 1.0f, 1.0f, 0.0f );
					cb.diffuseMaterial = XMFLOAT4( 1.0f, 1.0f, 1.0f, 0.0f );
					cb.specularColor = XMFLOAT4( 0.0f, 0.0f, 0.0f, 0.0f );
					m_pD3DDeviceCtx->UpdateSubresource( m_pFixedFunctionConstantBuffer, 0, NULL, &cb, 0, 0 );

					m_pD3DDeviceCtx->RSSetState( m_pWireframeRS );

					Geometry->Render( m_pD3DDeviceCtx );				
				}
			}
		} // If Geometry
	}
	return drewSurface;
}
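The MMatrix to XMMATRIX conversion in drawSurface() is written out inline; a small helper factoring it out might look like the sketch below. It assumes the DirectXMath headers and the same row layout already used by this renderer.

// Sketch: convert a Maya MMatrix (doubles) to an XMMATRIX (floats), keeping the
// row layout used in drawSurface() above. With the older xnamath.h header,
// XMMATRIX lives in the global namespace instead of DirectX::.
#include <DirectXMath.h>
#include <maya/MMatrix.h>

static DirectX::XMMATRIX mayaToXMMatrix( const MMatrix &m )
{
	return DirectX::XMMATRIX(
		(float)m.matrix[0][0], (float)m.matrix[0][1], (float)m.matrix[0][2], (float)m.matrix[0][3],
		(float)m.matrix[1][0], (float)m.matrix[1][1], (float)m.matrix[1][2], (float)m.matrix[1][3],
		(float)m.matrix[2][0], (float)m.matrix[2][1], (float)m.matrix[2][2], (float)m.matrix[2][3],
		(float)m.matrix[3][0], (float)m.matrix[3][1], (float)m.matrix[3][2], (float)m.matrix[3][3] );
}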
Example #30
bool DrawableHolderUI::select( MSelectInfo &selectInfo, MSelectionList &selectionList, MPointArray &worldSpaceSelectPts ) const
{
	MStatus s;

	// early out if we're not selectable. we always allow components to be selected if we're highlighted,
	// but we don't allow ourselves to be selected as a whole unless meshes are in the selection mask.
	// it's not ideal that we act like a mesh, but it's at least consistent with the drawing mask we use.
	if( selectInfo.displayStatus() != M3dView::kHilite )
	{
		MSelectionMask meshMask( MSelectionMask::kSelectMeshes );
		if( !selectInfo.selectable( meshMask ) )
		{
			return false;
		}
	}

	// early out if we have no scene to draw
	DrawableHolder *drawableHolder = static_cast<DrawableHolder *>( surfaceShape() );
	IECoreGL::ConstScenePtr scene = drawableHolder->scene();
	if( !scene )
	{
		return false;
	}

	// we want to perform the selection using an IECoreGL::Selector, so we
	// can avoid the performance penalty associated with using GL_SELECT mode.
	// that means we don't really want to call view.beginSelect(), but we have to
	// call it just to get the projection matrix for our own selection, because as far
	// as I can tell, there is no other way of getting it reliably.

	M3dView view = selectInfo.view();
	view.beginSelect();
	Imath::M44d projectionMatrix;
	glGetDoublev( GL_PROJECTION_MATRIX, projectionMatrix.getValue() );
	view.endSelect();

	view.beginGL();

		glMatrixMode( GL_PROJECTION );
		glLoadMatrixd( projectionMatrix.getValue() );

		IECoreGL::Selector::Mode selectionMode = IECoreGL::Selector::IDRender;
		if( selectInfo.displayStatus() == M3dView::kHilite && !selectInfo.singleSelection() )
		{
			selectionMode = IECoreGL::Selector::OcclusionQuery;
		}

		std::vector<IECoreGL::HitRecord> hits;
		{
			IECoreGL::Selector selector( Imath::Box2f( Imath::V2f( 0 ), Imath::V2f( 1 ) ), selectionMode, hits );

			IECoreGL::State::bindBaseState();
			selector.baseState()->bind();
			scene->render( selector.baseState() );
		}

	view.endGL();

	if( !hits.size() )
	{
		return false;
	}

	// find the depth of the closest hit:
	MIntArray componentIndices;
	float depthMin = std::numeric_limits<float>::max();
	for( int i=0, e = hits.size(); i < e; i++ )
	{
		if( hits[i].depthMin < depthMin )
		{
			depthMin = hits[i].depthMin;
		}
	}


	// figure out the world space location of the closest hit

	MDagPath camera;
	view.getCamera( camera );
	MFnCamera fnCamera( camera.node() );
	float near = fnCamera.nearClippingPlane();
	float far = fnCamera.farClippingPlane();

	float z = -1;
	if( fnCamera.isOrtho() )
	{
		z = Imath::lerp( near, far, depthMin );
	}
	else
	{
		// perspective camera - depth isn't linear so linearise to get z
		float a = far / ( far - near );
		float b = far * near / ( near - far );
		z = b / ( depthMin - a );
	}

	MPoint localRayOrigin;
	MVector localRayDirection;
	selectInfo.getLocalRay( localRayOrigin, localRayDirection );
	MMatrix localToCamera = selectInfo.selectPath().inclusiveMatrix() * camera.inclusiveMatrix().inverse();
	MPoint cameraRayOrigin = localRayOrigin * localToCamera;
	MVector cameraRayDirection = localRayDirection * localToCamera;

	MPoint cameraIntersectionPoint = cameraRayOrigin + cameraRayDirection * ( -( z - near ) / cameraRayDirection.z );
	MPoint worldIntersectionPoint = cameraIntersectionPoint * camera.inclusiveMatrix();

	MSelectionList item;
	item.add( selectInfo.selectPath() );

	selectInfo.addSelection(
		item, worldIntersectionPoint,
		selectionList, worldSpaceSelectPts,
		MSelectionMask::kSelectMeshes,
		false
	);

	return true;
}