Example #1
GlfSimpleLight
HdStLight::_ApproximateAreaLight(SdfPath const &id, 
                                 HdSceneDelegate *sceneDelegate)
{
    // Get the color of the light
    GfVec3f hdc = sceneDelegate->GetLightParamValue(id, HdLightTokens->color)
            .Get<GfVec3f>();

    // Extract intensity
    float intensity = 
        sceneDelegate->GetLightParamValue(id, HdLightTokens->intensity)
            .Get<float>();

    // Extract the exposure of the light
    float exposure = 
        sceneDelegate->GetLightParamValue(id, HdLightTokens->exposure)
            .Get<float>();
    intensity *= powf(2.0f, GfClamp(exposure, -50.0f, 50.0f));

    // Calculate the final color of the light
    GfVec4f c(hdc[0]*intensity, hdc[1]*intensity, hdc[2]*intensity, 1.0f); 

    // Get the transform of the light
    GfMatrix4d transform = _params[HdTokens->transform].Get<GfMatrix4d>();
    GfVec3d hdp = transform.ExtractTranslation();
    GfVec4f p = GfVec4f(hdp[0], hdp[1], hdp[2], 1.0f);

    // Create the Glf Simple Light object that will be used by the rest
    // of the pipeline. No support for shadows for this translated light.
    GlfSimpleLight l;
    l.SetPosition(p);
    l.SetDiffuse(c);
    l.SetHasShadow(false);
    return l;
}
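The final diffuse color above is the light's color scaled by intensity * 2^exposure, with the exposure clamped to a sane range. A minimal sketch of just that math, assuming the usual Gf headers and pxr namespace are in scope as in the snippet above; the helper name is ours, not part of HdStLight:

// Sketch only: the scale factor applied above, factored into a helper.
static GfVec4f _ScaleLightColor(const GfVec3f &color, float intensity, float exposure)
{
    const float scale = intensity * powf(2.0f, GfClamp(exposure, -50.0f, 50.0f));
    return GfVec4f(color[0] * scale, color[1] * scale, color[2] * scale, 1.0f);
}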
Example #2
File: frustum.cpp Project: JT-a/USD
GfMatrix4d
GfFrustum::ComputeViewMatrix() const
{
    GfMatrix4d m;
    m.SetLookAt(_position, _rotation);
    return m;
}
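A small usage sketch (the frustum pose and test point are illustrative values only, and the usual Gf headers are assumed in scope): the view matrix maps world-space points into eye space, where the camera looks down -Z.

// Sketch: bring a world-space point into eye space with the view matrix.
static GfVec3d _WorldPointToEye()
{
    GfFrustum frustum;
    frustum.SetPosition(GfVec3d(0.0, 0.0, 10.0));
    frustum.SetRotation(GfRotation(GfVec3d::YAxis(), 0.0)); // looking down -Z

    const GfMatrix4d view = frustum.ComputeViewMatrix();
    // Gf uses row vectors, so Transform() applies the matrix on the right.
    // The world origin should land at (0, 0, -10) in eye space.
    return view.Transform(GfVec3d(0.0, 0.0, 0.0));
}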
Example #3
File: frustum.cpp Project: JT-a/USD
vector<GfVec3d>
GfFrustum::ComputeCornersAtDistance(double d) const
{
    const GfVec2d &winMin = _window.GetMin();
    const GfVec2d &winMax = _window.GetMax();

    vector<GfVec3d> corners;
    corners.reserve(4);

    if (_projectionType == Perspective) {
        // Similar to ComputeCorners
        corners.push_back(GfVec3d(d * winMin[0], d * winMin[1], -d));
        corners.push_back(GfVec3d(d * winMax[0], d * winMin[1], -d));
        corners.push_back(GfVec3d(d * winMin[0], d * winMax[1], -d));
        corners.push_back(GfVec3d(d * winMax[0], d * winMax[1], -d));
    }
    else {
        corners.push_back(GfVec3d(winMin[0], winMin[1], -d));
        corners.push_back(GfVec3d(winMax[0], winMin[1], -d));
        corners.push_back(GfVec3d(winMin[0], winMax[1], -d));
        corners.push_back(GfVec3d(winMax[0], winMax[1], -d));
    }

    // Each corner is then transformed into world space by the inverse
    // of the view matrix.
    const GfMatrix4d m = ComputeViewInverse();
    for (int i = 0; i < 4; i++)
        corners[i] = m.Transform(corners[i]);

    return corners;
}
Example #4
GfMatrix4d
Hdx_UnitTestGLDrawing::GetViewMatrix() const
{
    GfMatrix4d viewMatrix;
    viewMatrix.SetIdentity();
    // rotate from z-up to y-up
    viewMatrix *= GfMatrix4d().SetRotate(GfRotation(GfVec3d(1.0,0.0,0.0), -90.0));
    viewMatrix *= GfMatrix4d().SetRotate(GfRotation(GfVec3d(0, 1, 0), _rotate[1]));
    viewMatrix *= GfMatrix4d().SetRotate(GfRotation(GfVec3d(1, 0, 0), _rotate[0]));
    viewMatrix *= GfMatrix4d().SetTranslate(GfVec3d(_translate[0], _translate[1], _translate[2]));

    return viewMatrix;
}
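In Gf's row-vector convention the left-most factor in the product acts on points first, so the -90 degree rotation about X (z-up to y-up) is applied before the turntable rotations and the dolly. A minimal check of that first step in isolation; the literal vectors are illustrative:

// Sketch: rotating -90 degrees about +X maps world +Z onto camera +Y.
static bool _ZUpBecomesYUp()
{
    const GfMatrix4d zUpToYUp =
        GfMatrix4d().SetRotate(GfRotation(GfVec3d(1.0, 0.0, 0.0), -90.0));
    const GfVec3d mapped = zUpToYUp.TransformDir(GfVec3d(0.0, 0.0, 1.0));
    return GfIsClose(mapped, GfVec3d(0.0, 1.0, 0.0), 1e-6);
}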
Example #5
File: frustum.cpp Project: JT-a/USD
vector<GfVec3d>
GfFrustum::ComputeCorners() const
{
    const GfVec2d &winMin = _window.GetMin();
    const GfVec2d &winMax = _window.GetMax();
    double near           = _nearFar.GetMin();
    double far            = _nearFar.GetMax();

    vector<GfVec3d> corners;
    corners.reserve(8);

    if (_projectionType == Perspective) {
        // Compute the eye-space corners of the near-plane and
        // far-plane frustum rectangles using similar triangles. The
        // reference plane in which the window rectangle is defined is
        // a distance of 1 from the eyepoint. By similar triangles,
        // just multiply the window points by near and far to get the
        // near and far rectangles.
        // XXX Note: If we ever allow reference plane depth to be other 
        // than 1.0, we'll need to revisit this.
        corners.push_back(GfVec3d(near * winMin[0], near * winMin[1], -near));
        corners.push_back(GfVec3d(near * winMax[0], near * winMin[1], -near));
        corners.push_back(GfVec3d(near * winMin[0], near * winMax[1], -near));
        corners.push_back(GfVec3d(near * winMax[0], near * winMax[1], -near));
        corners.push_back(GfVec3d(far  * winMin[0], far  * winMin[1], -far));
        corners.push_back(GfVec3d(far  * winMax[0], far  * winMin[1], -far));
        corners.push_back(GfVec3d(far  * winMin[0], far  * winMax[1], -far));
        corners.push_back(GfVec3d(far  * winMax[0], far  * winMax[1], -far));
    }
    else {
        // Just use the reference plane rectangle as is, translated to
        // the near and far planes.
        corners.push_back(GfVec3d(winMin[0], winMin[1], -near));
        corners.push_back(GfVec3d(winMax[0], winMin[1], -near));
        corners.push_back(GfVec3d(winMin[0], winMax[1], -near));
        corners.push_back(GfVec3d(winMax[0], winMax[1], -near));
        corners.push_back(GfVec3d(winMin[0], winMin[1], -far));
        corners.push_back(GfVec3d(winMax[0], winMin[1], -far));
        corners.push_back(GfVec3d(winMin[0], winMax[1], -far));
        corners.push_back(GfVec3d(winMax[0], winMax[1], -far));
    }

    // Each corner is then transformed into world space by the inverse
    // of the view matrix.
    GfMatrix4d m = ComputeViewInverse();
    for (int i = 0; i < 8; i++)
        corners[i] = m.Transform(corners[i]);

    return corners;
}
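A usage sketch (the helper below is ours, not part of GfFrustum): the eight world-space corners can be folded into a GfRange3d to get a quick axis-aligned bound of the frustum.

// Sketch: axis-aligned bounding box of a frustum from its corners.
static GfRange3d _ComputeFrustumBounds(const GfFrustum &frustum)
{
    GfRange3d bounds;
    for (const GfVec3d &corner : frustum.ComputeCorners()) {
        bounds.UnionWith(corner);   // grow the box to contain each corner
    }
    return bounds;
}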
Example #6
File: frustum.cpp Project: JT-a/USD
GfMatrix4d
GfFrustum::ComputeProjectionMatrix() const
{
    // Build the projection matrix per Section 2.11 of
    // The OpenGL Specification: Coordinate Transforms.
    GfMatrix4d matrix;
    matrix.SetIdentity();

    const double l = _window.GetMin()[0];
    const double r = _window.GetMax()[0];
    const double b = _window.GetMin()[1];
    const double t = _window.GetMax()[1];
    const double n = _nearFar.GetMin();
    const double f = _nearFar.GetMax();

    const double rl = r - l;
    const double tb = t - b;
    const double fn = f - n;

    if (_projectionType == GfFrustum::Orthographic) {
        matrix[0][0] =  2.0 / rl;
        matrix[1][1] =  2.0 / tb;
        matrix[2][2] = -2.0 / fn;
        matrix[3][0] = -(r + l) / rl;
        matrix[3][1] = -(t + b) / tb;
        matrix[3][2] = -(f + n) / fn;
    }
    else {
        // Perspective:
        // The window coordinates are specified with respect to the
        // reference plane (near == 1).
        // XXX Note: If we ever allow reference plane depth to be other 
        // than 1.0, we'll need to revisit this.
        matrix[0][0] = 2.0 / rl;
        matrix[1][1] = 2.0 / tb;
        matrix[2][2] = -(f + n) / fn;
        matrix[2][0] =  (r + l) / rl;
        matrix[2][1] =  (t + b) / tb;
        matrix[3][2] = -2.0 * n * f / fn;
        matrix[2][3] = -1.0;
        matrix[3][3] =  0.0;
    }

    return matrix;
}
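A small sketch of setting up a symmetric perspective frustum with SetPerspective (vertical field of view in degrees, aspect ratio, near, far; the literal values are assumptions) and then asking for the matrix above. In the perspective branch, matrix[2][3] is -1 and matrix[3][3] is 0, as in the OpenGL convention.

// Sketch: a symmetric perspective projection (values assumed).
static GfMatrix4d _MakePerspectiveProjection()
{
    GfFrustum frustum;
    // 60-degree vertical field of view, 16:9 aspect, near 0.1, far 1000.
    frustum.SetPerspective(60.0, 16.0 / 9.0, 0.1, 1000.0);
    return frustum.ComputeProjectionMatrix();
}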
Example #7
/* virtual */
void
HdSt_TestLightingShader::SetCamera(GfMatrix4d const &worldToViewMatrix,
                                 GfMatrix4d const &projectionMatrix)
{
    for (int i = 0; i < 2; ++i) {
        _lights[i].eyeDir
            = worldToViewMatrix.TransformDir(_lights[i].dir).GetNormalized();
    }
}
Example #8
File: frustum.cpp Project: JT-a/USD
void
GfFrustum::SetPositionAndRotationFromMatrix(
    const GfMatrix4d &camToWorldXf)
{
    // First conform matrix to be...
    GfMatrix4d conformedXf = camToWorldXf;
    // ... right handed
    if (!conformedXf.IsRightHanded()) {
        static GfMatrix4d flip(GfVec4d(-1.0, 1.0, 1.0, 1.0));
        conformedXf = flip * conformedXf;
    }

    // ... and orthonormal
    conformedXf.Orthonormalize();

    SetRotation(conformedXf.ExtractRotation());
    SetPosition(conformedXf.ExtractTranslation());
}
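A usage sketch (the camera-to-world matrix here is composed from assumed values; any camera transform works): only the pose comes from the matrix, while the window, near/far range and projection type keep whatever was already set on the frustum.

// Sketch: drive a frustum's pose from a camera-to-world transform.
static GfFrustum _FrustumFromCameraXform()
{
    const GfMatrix4d camToWorld =
        GfMatrix4d().SetRotate(GfRotation(GfVec3d::YAxis(), 30.0)) *
        GfMatrix4d().SetTranslate(GfVec3d(0.0, 5.0, 20.0));

    GfFrustum frustum;
    // Window, near/far and projection type keep their defaults; only the
    // pose is taken from the (conformed) matrix.
    frustum.SetPositionAndRotationFromMatrix(camToWorld);
    return frustum;
}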
Example #9
// Assumes rotationOrder is XYZ.
static void
_RotMatToRotXYZ(
    const GfMatrix4d &rotMat,
    GfVec3f *rotXYZ)
{
    GfRotation rot = rotMat.ExtractRotation();
    GfVec3d angles = rot.Decompose(GfVec3d::ZAxis(),
                                   GfVec3d::YAxis(),
                                   GfVec3d::XAxis());
    *rotXYZ = GfVec3f(angles[2], angles[1], angles[0]);
}
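Decompose() returns the angles in the order the axes were passed (Z first here), which is why they are reversed into an XYZ triple. A round-trip sketch using the helper above; the angle values are illustrative assumptions:

// Sketch: compose XYZ Euler rotations and recover them with the helper above.
static GfVec3f _RoundTripRotXYZ()
{
    // Row-vector convention: the left-most factor acts on points first,
    // so this is rotate X, then Y, then Z -- i.e. rotation order XYZ.
    const GfMatrix4d rotMat =
        GfMatrix4d().SetRotate(GfRotation(GfVec3d::XAxis(), 10.0)) *
        GfMatrix4d().SetRotate(GfRotation(GfVec3d::YAxis(), 20.0)) *
        GfMatrix4d().SetRotate(GfRotation(GfVec3d::ZAxis(), 30.0));

    GfVec3f rotXYZ;
    _RotMatToRotXYZ(rotMat, &rotXYZ);   // expected to be close to (10, 20, 30)
    return rotXYZ;
}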
Example #10
GfPlane &
GfPlane::Transform(const GfMatrix4d &matrix) 
{
    // Compute the point on the plane along the normal from the origin.
    GfVec3d pointOnPlane = _distance * _normal;

    // Transform the plane normal by the adjoint of the matrix to get
    // the new normal.  The adjoint (inverse transpose) is used to
    // multiply normals so they are not scaled incorrectly.
    GfMatrix4d adjoint = matrix.GetInverse().GetTranspose();
    _normal = adjoint.TransformDir(_normal).GetNormalized();

    // Transform the point on the plane by the matrix.
    pointOnPlane = matrix.Transform(pointOnPlane);

    // The new distance is the projected distance of the vector to the
    // transformed point onto the (unit) transformed normal. This is
    // just a dot product.
    _distance = GfDot(pointOnPlane, _normal);

    return *this;
}
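A consistency sketch (the plane, point and transform are illustrative values): a point that lies on the plane, pushed through the same matrix, should still lie on the transformed plane.

// Sketch: points on a plane stay on it after Transform().
static bool _PlaneTransformIsConsistent()
{
    GfPlane plane(GfVec3d(0.0, 1.0, 0.0), /* distanceToOrigin */ 2.0);
    const GfVec3d pointOnPlane(5.0, 2.0, -3.0);   // y == 2, so on the plane

    const GfMatrix4d xf =
        GfMatrix4d().SetRotate(GfRotation(GfVec3d::ZAxis(), 30.0)) *
        GfMatrix4d().SetTranslate(GfVec3d(1.0, 2.0, 3.0));

    plane.Transform(xf);
    // Signed distance of the transformed point from the transformed plane.
    return GfAbs(plane.GetDistance(xf.Transform(pointOnPlane))) < 1e-6;
}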
Example #11
// Assumes rotationOrder is XYZ.
static void
_RotMatToRotTriplet(
    const GfMatrix4d &rotMat,
    GfVec3d *rotTriplet)
{
    GfRotation rot = rotMat.ExtractRotation();
    GfVec3d angles = rot.Decompose(GfVec3d::ZAxis(),
                                   GfVec3d::YAxis(),
                                   GfVec3d::XAxis());
    (*rotTriplet)[0] = angles[2];
    (*rotTriplet)[1] = angles[1];
    (*rotTriplet)[2] = angles[0];
}
Example #12
static void
_MatrixToVectorsWithPivotInvariant(
    const GfMatrix4d &m,
    const GfVec3d pivotPosition,
    const GfVec3d pivotOrientation,
    GfVec3d *translation, 
    GfVec3d *rotation, 
    GfVec3d *scale, 
    GfVec3d *scaleOrientation) 
{

    GfMatrix3d pivotOrientMat = _EulerXYZToMatrix3d(pivotOrientation);

    GfMatrix4d pp    = GfMatrix4d(1.0).SetTranslate( pivotPosition);
    GfMatrix4d ppInv = GfMatrix4d(1.0).SetTranslate(-pivotPosition);
    GfMatrix4d po    = GfMatrix4d(1.0).SetRotate(pivotOrientMat);
    GfMatrix4d poInv = GfMatrix4d(1.0).SetRotate(pivotOrientMat.GetInverse());

    GfMatrix4d factorMe = po * pp * m * ppInv;

    GfMatrix4d scaleOrientMat, factoredRotMat, perspMat;

    factorMe.Factor(&scaleOrientMat, scale, &factoredRotMat, 
                    translation, &perspMat);

    GfMatrix4d rotMat = factoredRotMat * poInv;

    if(not rotMat.Orthonormalize(/* issueWarning */ false))
        TF_WARN("Failed to orthonormalize rotMat.");

    _RotMatToRotTriplet(rotMat, rotation);

    if(not scaleOrientMat.Orthonormalize(/* issueWarning */ false))
        TF_WARN("Failed to orthonormalize scaleOrientMat.");

    _RotMatToRotTriplet(scaleOrientMat, scaleOrientation);
}
Example #13
static
bool
_IsMatrixIdentity(const GfMatrix4d& matrix)
{
    const GfMatrix4d IDENTITY(1.0);
    const double TOLERANCE = 1e-6;

    if (GfIsClose(matrix.GetRow(0), IDENTITY.GetRow(0), TOLERANCE)      &&
            GfIsClose(matrix.GetRow(1), IDENTITY.GetRow(1), TOLERANCE)  &&
            GfIsClose(matrix.GetRow(2), IDENTITY.GetRow(2), TOLERANCE)  &&
            GfIsClose(matrix.GetRow(3), IDENTITY.GetRow(3), TOLERANCE)) {
        return true;
    }

    return false;
}
Example #14
static void
_ConvertMatrixToComponents(const GfMatrix4d &matrix, 
                           GfVec3d *translation, 
                           GfVec3f *rotation,
                           GfVec3f *scale)
{
    GfMatrix4d rotMat(1.0);
    GfVec3d doubleScale(1.0);
    GfMatrix4d scaleOrientMatUnused, perspMatUnused;
    matrix.Factor(&scaleOrientMatUnused, &doubleScale, &rotMat, 
                    translation, &perspMatUnused);

    *scale = GfVec3f(doubleScale[0], doubleScale[1], doubleScale[2]);

    if (!rotMat.Orthonormalize(/* issueWarning */ false))
        TF_WARN("Failed to orthonormalize rotation matrix.");

    _RotMatToRotXYZ(rotMat, rotation);
}
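A round-trip sketch with assumed literal values (in the row-vector convention scale is the left-most factor, so the matrix scales, then rotates, then translates); the helper above should recover components close to the inputs.

// Sketch: compose scale * rotation * translation and recover the components.
static void _RoundTripComponents()
{
    const GfMatrix4d matrix =
        GfMatrix4d().SetScale(GfVec3d(2.0, 2.0, 2.0)) *
        GfMatrix4d().SetRotate(GfRotation(GfVec3d::YAxis(), 45.0)) *
        GfMatrix4d().SetTranslate(GfVec3d(1.0, 2.0, 3.0));

    GfVec3d translation;
    GfVec3f rotation, scale;
    _ConvertMatrixToComponents(matrix, &translation, &rotation, &scale);
    // translation ~ (1, 2, 3), rotation ~ (0, 45, 0) degrees, scale ~ (2, 2, 2)
}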
Example #15
void
PxrUsdKatanaReadPointInstancer(
        const UsdGeomPointInstancer& instancer,
        const PxrUsdKatanaUsdInPrivateData& data,
        PxrUsdKatanaAttrMap& instancerAttrMap,
        PxrUsdKatanaAttrMap& sourcesAttrMap,
        PxrUsdKatanaAttrMap& instancesAttrMap,
        PxrUsdKatanaAttrMap& inputAttrMap)
{
    const double currentTime = data.GetCurrentTime();

    PxrUsdKatanaReadXformable(instancer, data, instancerAttrMap);

    // Get primvars for setting later. Unfortunately, the only way to get them
    // out of the attr map is to build it, which will cause its contents to be
    // cleared. We'll need to restore its contents before continuing.
    //
    FnKat::GroupAttribute instancerAttrs = instancerAttrMap.build();
    FnKat::GroupAttribute primvarAttrs =
            instancerAttrs.getChildByName("geometry.arbitrary");
    for (int64_t i = 0; i < instancerAttrs.getNumberOfChildren(); ++i)
    {
        instancerAttrMap.set(instancerAttrs.getChildName(i),
                instancerAttrs.getChildByIndex(i));
    }

    instancerAttrMap.set("type", FnKat::StringAttribute("usd point instancer"));

    const std::string fileName = data.GetUsdInArgs()->GetFileName();
    instancerAttrMap.set("info.usd.fileName", FnKat::StringAttribute(fileName));

    FnKat::GroupAttribute inputAttrs = inputAttrMap.build();

    const std::string katOutputPath = FnKat::StringAttribute(
            inputAttrs.getChildByName("outputLocationPath")).getValue("", false);
    if (katOutputPath.empty())
    {
        _LogAndSetError(instancerAttrMap, "No output location path specified");
        return;
    }

    //
    // Validate instancer data.
    //

    const std::string instancerPath = instancer.GetPath().GetString();

    UsdStageWeakPtr stage = instancer.GetPrim().GetStage();

    // Prototypes (required)
    //
    SdfPathVector protoPaths;
    instancer.GetPrototypesRel().GetTargets(&protoPaths);
    if (protoPaths.empty())
    {
        _LogAndSetError(instancerAttrMap, "Instancer has no prototypes");
        return;
    }

    _PathToPrimMap primCache;
    for (auto protoPath : protoPaths) {
        const UsdPrim &protoPrim = stage->GetPrimAtPath(protoPath);
        primCache[protoPath] = protoPrim;
    }

    // Indices (required)
    //
    VtIntArray protoIndices;
    if (!instancer.GetProtoIndicesAttr().Get(&protoIndices, currentTime))
    {
        _LogAndSetError(instancerAttrMap, "Instancer has no prototype indices");
        return;
    }
    const size_t numInstances = protoIndices.size();
    if (numInstances == 0)
    {
        _LogAndSetError(instancerAttrMap, "Instancer has no prototype indices");
        return;
    }
    for (auto protoIndex : protoIndices)
    {
        if (protoIndex < 0 || static_cast<size_t>(protoIndex) >= protoPaths.size())
        {
            _LogAndSetError(instancerAttrMap, TfStringPrintf(
                    "Out of range prototype index %d", protoIndex));
            return;
        }
    }

    // Mask (optional)
    //
    std::vector<bool> pruneMaskValues =
            instancer.ComputeMaskAtTime(currentTime);
    if (!pruneMaskValues.empty() and pruneMaskValues.size() != numInstances)
    {
        _LogAndSetError(instancerAttrMap,
                "Mismatch in length of indices and mask");
        return;
    }

    // Positions (required)
    //
    UsdAttribute positionsAttr = instancer.GetPositionsAttr();
    if (!positionsAttr.HasValue())
    {
        _LogAndSetError(instancerAttrMap, "Instancer has no positions");
        return;
    }

    //
    // Compute instance transform matrices.
    //

    const double timeCodesPerSecond = stage->GetTimeCodesPerSecond();

    // Gather frame-relative sample times and add them to the current time to
    // generate absolute sample times.
    //
    const std::vector<double> &motionSampleTimes =
        data.GetMotionSampleTimes(positionsAttr);
    const size_t sampleCount = motionSampleTimes.size();
    std::vector<UsdTimeCode> sampleTimes(sampleCount);
    for (size_t a = 0; a < sampleCount; ++a)
    {
        sampleTimes[a] = UsdTimeCode(currentTime + motionSampleTimes[a]);
    }

    // Get velocityScale from the opArgs.
    //
    float velocityScale = FnKat::FloatAttribute(
        inputAttrs.getChildByName("opArgs.velocityScale")).getValue(1.0f, false);

    // XXX Replace with UsdGeomPointInstancer::ComputeInstanceTransformsAtTime.
    //
    std::vector<std::vector<GfMatrix4d>> xformSamples(sampleCount);
    const size_t numXformSamples =
        _ComputeInstanceTransformsAtTime(xformSamples, instancer, sampleTimes,
            UsdTimeCode(currentTime), timeCodesPerSecond, numInstances,
            positionsAttr, velocityScale);
    if (numXformSamples == 0) {
        _LogAndSetError(instancerAttrMap, "Could not compute "
                                          "sample/topology-invarying instance "
                                          "transform matrix");
        return;
    }

    //
    // Compute prototype bounds.
    //

    bool aggregateBoundsValid = false;
    std::vector<double> aggregateBounds;

    // XXX Replace with UsdGeomPointInstancer::ComputeExtentAtTime.
    //
    VtVec3fArray aggregateExtent;
    if (_ComputeExtentAtTime(
            aggregateExtent, data.GetUsdInArgs(), xformSamples,
            motionSampleTimes, protoIndices, protoPaths, primCache,
            pruneMaskValues)) {
        aggregateBoundsValid = true;
        aggregateBounds.resize(6);
        aggregateBounds[0] = aggregateExtent[0][0]; // min x
        aggregateBounds[1] = aggregateExtent[1][0]; // max x
        aggregateBounds[2] = aggregateExtent[0][1]; // min y
        aggregateBounds[3] = aggregateExtent[1][1]; // max y
        aggregateBounds[4] = aggregateExtent[0][2]; // min z
        aggregateBounds[5] = aggregateExtent[1][2]; // max z
    }

    //
    // Build sources. Keep track of which instances use them.
    //

    FnGeolibServices::StaticSceneCreateOpArgsBuilder sourcesBldr(false);

    std::vector<int> instanceIndices;
    instanceIndices.reserve(numInstances);

    std::vector<std::string> instanceSources;
    instanceSources.reserve(protoPaths.size());

    std::map<std::string, int> instanceSourceIndexMap;

    std::vector<int> omitList;
    omitList.reserve(numInstances);

    std::map<SdfPath, std::string> protoPathsToKatPaths;

    for (size_t i = 0; i < numInstances; ++i)
    {
        int index = protoIndices[i];

        // Check to see if we are pruned.
        //
        bool isPruned = (!pruneMaskValues.empty() and
                         pruneMaskValues[i] == false);
        if (isPruned)
        {
            omitList.push_back(i);
        }

        const SdfPath &protoPath = protoPaths[index];

        // Compute the full (Katana) path to this prototype.
        //
        std::string fullProtoPath;
        std::map<SdfPath, std::string>::const_iterator pptkpIt =
                protoPathsToKatPaths.find(protoPath);
        if (pptkpIt != protoPathsToKatPaths.end())
        {
            fullProtoPath = pptkpIt->second;
        }
        else
        {
            _PathToPrimMap::const_iterator pcIt = primCache.find(protoPath);
            const UsdPrim &protoPrim = pcIt->second;
            if (!protoPrim) {
                continue;
            }

            // Determine where (what path) to start building the prototype prim
            // such that its material bindings will be preserved. This could be
            // the prototype path itself or an ancestor path.
            //
            SdfPathVector commonPrefixes;

            UsdRelationship materialBindingsRel =
                    UsdShadeMaterial::GetBindingRel(protoPrim);

            auto assetAPI = UsdModelAPI(protoPrim);
            std::string assetName;
            bool isReferencedModelPrim =
                    assetAPI.IsModel() and assetAPI.GetAssetName(&assetName);

            if (!materialBindingsRel or isReferencedModelPrim)
            {
                // The prim has no material bindings or is a referenced model
                // prim (meaning that materials are defined below it); start
                // building at the prototype path.
                //
                commonPrefixes.push_back(protoPath);
            }
            else
            {
                SdfPathVector materialPaths;
                materialBindingsRel.GetForwardedTargets(&materialPaths);
                for (auto materialPath : materialPaths)
                {
                    const SdfPath &commonPrefix =
                            protoPath.GetCommonPrefix(materialPath);
                    if (commonPrefix.GetString() == "/")
                    {
                        // XXX Unhandled case.
                        // The prototype prim and its material are not under the
                        // same parent; start building at the prototype path
                        // (although it is likely that bindings will be broken).
                        //
                        commonPrefixes.push_back(protoPath);
                    }
                    else
                    {
                        // Start building at the common ancestor between the
                        // prototype prim and its material.
                        //
                        commonPrefixes.push_back(commonPrefix);
                    }
                }
            }

            // XXX Unhandled case.
            // We'll use the first common ancestor even if there is more than
            // one (which shouldn't happen if the prototype prim and its bindings
            // are under the same parent).
            //
            SdfPath::RemoveDescendentPaths(&commonPrefixes);
            const std::string buildPath = commonPrefixes[0].GetString();

            // See if the path is a child of the point instancer. If so, we'll
            // match its hierarchy. If not, we'll put it under a 'prototypes'
            // group.
            //
            std::string relBuildPath;
            if (pystring::startswith(buildPath, instancerPath + "/"))
            {
                relBuildPath = pystring::replace(
                        buildPath, instancerPath + "/", "");
            }
            else
            {
                relBuildPath = "prototypes/" +
                        FnGeolibUtil::Path::GetLeafName(buildPath);
            }

            // Start generating the full path to the prototype.
            //
            fullProtoPath = katOutputPath + "/" + relBuildPath;

            // Make the common ancestor our instance source.
            //
            sourcesBldr.setAttrAtLocation(relBuildPath,
                    "type", FnKat::StringAttribute("instance source"));

            // Author a tracking attr.
            //
            sourcesBldr.setAttrAtLocation(relBuildPath,
                    "info.usd.sourceUsdPath",
                    FnKat::StringAttribute(buildPath));

            // Tell the BuildIntermediate op to start building at the common
            // ancestor.
            //
            sourcesBldr.setAttrAtLocation(relBuildPath,
                    "usdPrimPath", FnKat::StringAttribute(buildPath));
            sourcesBldr.setAttrAtLocation(relBuildPath,
                    "usdPrimName", FnKat::StringAttribute("geo"));

            if (protoPath.GetString() != buildPath)
            {
                // Finish generating the full path to the prototype.
                //
                fullProtoPath = fullProtoPath + "/geo" + pystring::replace(
                        protoPath.GetString(), buildPath, "");
            }

            // Create a mapping that will link the instance's index to its
            // prototype's full path.
            //
            instanceSourceIndexMap[fullProtoPath] = instanceSources.size();
            instanceSources.push_back(fullProtoPath);

            // Finally, store the full path in the map so we won't have to do
            // this work again.
            //
            protoPathsToKatPaths[protoPath] = fullProtoPath;
        }

        instanceIndices.push_back(instanceSourceIndexMap[fullProtoPath]);
    }

    //
    // Build instances.
    //

    FnGeolibServices::StaticSceneCreateOpArgsBuilder instancesBldr(false);

    instancesBldr.createEmptyLocation("instances", "instance array");

    instancesBldr.setAttrAtLocation("instances",
            "geometry.instanceSource",
                    FnKat::StringAttribute(instanceSources, 1));

    instancesBldr.setAttrAtLocation("instances",
            "geometry.instanceIndex",
                    FnKat::IntAttribute(&instanceIndices[0],
                            instanceIndices.size(), 1));

    FnKat::DoubleBuilder instanceMatrixBldr(16);
    for (size_t a = 0; a < numXformSamples; ++a) {

        double relSampleTime = motionSampleTimes[a];

        // Shove samples into the builder at the frame-relative sample time. If
        // motion is backwards, make sure to reverse time samples.
        std::vector<double> &matVec = instanceMatrixBldr.get(
            data.IsMotionBackward()
                ? PxrUsdKatanaUtils::ReverseTimeSample(relSampleTime)
                : relSampleTime);

        matVec.reserve(16 * numInstances);
        for (size_t i = 0; i < numInstances; ++i) {

            GfMatrix4d instanceXform = xformSamples[a][i];
            const double *matArray = instanceXform.GetArray();

            for (int j = 0; j < 16; ++j) {
                matVec.push_back(matArray[j]);
            }
        }
    }
    instancesBldr.setAttrAtLocation("instances",
            "geometry.instanceMatrix", instanceMatrixBldr.build());

    if (!omitList.empty())
    {
        instancesBldr.setAttrAtLocation("instances",
                "geometry.omitList",
                        FnKat::IntAttribute(&omitList[0], omitList.size(), 1));
    }

    instancesBldr.setAttrAtLocation("instances",
            "geometry.pointInstancerId",
                    FnKat::StringAttribute(katOutputPath));

    //
    // Transfer primvars.
    //

    FnKat::GroupBuilder instancerPrimvarsBldr;
    FnKat::GroupBuilder instancesPrimvarsBldr;
    for (int64_t i = 0; i < primvarAttrs.getNumberOfChildren(); ++i)
    {
        const std::string primvarName = primvarAttrs.getChildName(i);

        // Use "point" scope for the instancer.
        instancerPrimvarsBldr.set(primvarName, primvarAttrs.getChildByIndex(i));
        instancerPrimvarsBldr.set(primvarName + ".scope",
                FnKat::StringAttribute("point"));

        // User "primitive" scope for the instances.
        instancesPrimvarsBldr.set(primvarName, primvarAttrs.getChildByIndex(i));
        instancesPrimvarsBldr.set(primvarName + ".scope",
                FnKat::StringAttribute("primitive"));
    }
    instancerAttrMap.set("geometry.arbitrary", instancerPrimvarsBldr.build());
    instancesBldr.setAttrAtLocation("instances",
            "geometry.arbitrary", instancesPrimvarsBldr.build());

    //
    // Set the final aggregate bounds.
    //

    if (aggregateBoundsValid)
    {
        instancerAttrMap.set("bound", FnKat::DoubleAttribute(&aggregateBounds[0], 6, 2));
    }

    //
    // Set proxy attrs.
    //

    instancerAttrMap.set("proxies", PxrUsdKatanaUtils::GetViewerProxyAttr(data));

    //
    // Transfer builder results to our attr maps.
    //

    FnKat::GroupAttribute sourcesAttrs = sourcesBldr.build();
    for (int64_t i = 0; i < sourcesAttrs.getNumberOfChildren(); ++i)
    {
        sourcesAttrMap.set(
                sourcesAttrs.getChildName(i),
                sourcesAttrs.getChildByIndex(i));
    }

    FnKat::GroupAttribute instancesAttrs = instancesBldr.build();
    for (int64_t i = 0; i < instancesAttrs.getNumberOfChildren(); ++i)
    {
        instancesAttrMap.set(
                instancesAttrs.getChildName(i),
                instancesAttrs.getChildByIndex(i));
    }
}
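For reference, a sketch of the single-sample form of the API mentioned in the XXX note above (UsdGeomPointInstancer::ComputeInstanceTransformsAtTime); the wrapper name and arguments here are assumptions for illustration, and the schema's default prototype-xform and mask handling are used.

// Sketch: one transform per instance at a single time, via the schema API.
static VtArray<GfMatrix4d>
_ComputeInstancerXforms(const UsdGeomPointInstancer& instancer, double currentTime)
{
    VtArray<GfMatrix4d> xforms;
    instancer.ComputeInstanceTransformsAtTime(
        &xforms, UsdTimeCode(currentTime), /* baseTime */ UsdTimeCode(currentTime));
    return xforms;
}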
Example #16
void
GlfSimpleLightingContext::SetStateFromOpenGL()
{
    // Import the classic GL lights' parameters into GlfSimpleLight objects.
    SetUseLighting(glIsEnabled(GL_LIGHTING));

    GfMatrix4d worldToViewMatrix;
    glGetDoublev(GL_MODELVIEW_MATRIX, worldToViewMatrix.GetArray());
    GfMatrix4d viewToWorldMatrix = worldToViewMatrix.GetInverse();

    GLint nLights = 0;
    glGetIntegerv(GL_MAX_LIGHTS, &nLights);

    GlfSimpleLightVector lights;
    lights.reserve(nLights);

    GlfSimpleLight light;
    for(int i = 0; i < nLights; ++i)
    {
        int lightName = GL_LIGHT0 + i;
        if (glIsEnabled(lightName)) {
            GLfloat position[4], color[4];

            glGetLightfv(lightName, GL_POSITION, position);
            light.SetPosition(GfVec4f(position)*viewToWorldMatrix);
            
            glGetLightfv(lightName, GL_AMBIENT, color);
            light.SetAmbient(GfVec4f(color));
            
            glGetLightfv(lightName, GL_DIFFUSE, color);
            light.SetDiffuse(GfVec4f(color));
            
            glGetLightfv(lightName, GL_SPECULAR, color);
            light.SetSpecular(GfVec4f(color));

            lights.push_back(light);
        }
    }

    SetLights(lights);

    GlfSimpleMaterial material;

    GLfloat color[4], shininess;
    glGetMaterialfv(GL_FRONT, GL_AMBIENT, color);
    material.SetAmbient(GfVec4f(color));
    glGetMaterialfv(GL_FRONT, GL_DIFFUSE, color);
    material.SetDiffuse(GfVec4f(color));
    glGetMaterialfv(GL_FRONT, GL_SPECULAR, color);
    material.SetSpecular(GfVec4f(color));
    glGetMaterialfv(GL_FRONT, GL_EMISSION, color);
    material.SetEmission(GfVec4f(color));
    glGetMaterialfv(GL_FRONT, GL_SHININESS, &shininess);
    // clamp to 0.0001, since pow(0,0) is undefined in GLSL.
    shininess = std::max(0.0001f, shininess);
    material.SetShininess(shininess);

    SetMaterial(material);

    GfVec4f sceneAmbient;
    glGetFloatv(GL_LIGHT_MODEL_AMBIENT, &sceneAmbient[0]);
    SetSceneAmbient(sceneAmbient);
}
Example #17
File: frustum.cpp Project: JT-a/USD
GfFrustum &
GfFrustum::Transform(const GfMatrix4d &matrix)
{
    // We'll need the old parameters as we build up the new ones, so, work
    // on a newly instantiated frustum. We'll replace the contents of
    // this frustum with it once we are done. Note that _dirty is true
    // by default, so, there is no need to initialize it here.
    GfFrustum frustum;

    // Copy the projection type
    frustum._projectionType = _projectionType;

    // Transform the position of the frustum
    frustum._position = matrix.Transform(_position);

    // Transform the rotation as follows:
    //   1. build view and direction vectors
    //   2. transform them with the given matrix
    //   3. normalize the vectors and cross them to build an orthonormal frame
    //   4. construct a rotation matrix
    //   5. extract the new rotation from the matrix
    
    // Generate view direction and up vector
    GfVec3d viewDir = ComputeViewDirection();
    GfVec3d upVec   = ComputeUpVector();

    // Transform by matrix
    GfVec3d viewDirPrime = matrix.TransformDir(viewDir);
    GfVec3d upVecPrime = matrix.TransformDir(upVec);

    // Normalize. Save the vec size since it will be used to scale near/far.
    double scale = viewDirPrime.Normalize();
    upVecPrime.Normalize();

    // Cross them to get the third axis. Voila. We have an orthonormal frame.
    GfVec3d viewRightPrime = GfCross(viewDirPrime, upVecPrime);
    viewRightPrime.Normalize();

    // Construct a rotation matrix using the axes.
    //
    //  [ right     0 ]
    //  [ up        0 ]
    //  [ -viewDir  0 ]
    //  [ 0  0   0  1 ]
    GfMatrix4d rotMatrix;
    rotMatrix.SetIdentity();
    // first row
    rotMatrix[0][0] = viewRightPrime[0];
    rotMatrix[0][1] = viewRightPrime[1];
    rotMatrix[0][2] = viewRightPrime[2];

    // second row
    rotMatrix[1][0] = upVecPrime[0];
    rotMatrix[1][1] = upVecPrime[1];
    rotMatrix[1][2] = upVecPrime[2];

    // third row
    rotMatrix[2][0] = -viewDirPrime[0];
    rotMatrix[2][1] = -viewDirPrime[1];
    rotMatrix[2][2] = -viewDirPrime[2];

    // Extract rotation
    frustum._rotation = rotMatrix.ExtractRotation();

    // Since we applied the matrix to the direction vector, we can use
    // its length to find out the scaling that needs to be applied to the
    // near and far planes.
    frustum._nearFar = _nearFar * scale;

    // Use the same length to scale the view distance
    frustum._viewDistance = _viewDistance * scale;

    // Transform the reference plane as follows:
    //
    //   - construct two 3D points that are on the reference plane 
    //     (left/bottom and right/top corner of the reference window) 
    //   - transform the points with the given matrix
    //   - move the window back to one unit from the viewpoint and
    //     extract the 2D coordinates that would form the new reference
    //     window
    //
    //     A note on how we do the last "move" of the reference window:
    //     Using similar triangles and the fact that the reference window
    //     is one unit away from the viewpoint, one can show that it's 
    //     sufficient to divide the x and y components of the transformed
    //     corners by the length of the transformed direction vector.
    //
    //     A 2D diagram helps:
    //
    //                            |
    //                            |
    //               |            |
    //       * ------+------------+
    //      vp       |y1          |
    //                            |
    //       \--d1--/             |y2
    //
    //       \-------d2----------/
    //
    //     So, y1/y2 = d1/d2 ==> y1 = y2 * d1/d2 
    //     Since d1 = 1 ==> y1 = y2 / d2
    //     The same argument applies to the x coordinate.
    //
    // NOTE: In an orthographic projection, the last step (division by
    // the length of the vector) is skipped.
    //
    // XXX NOTE2:  The above derivation relies on the
    // fact that GetReferecePlaneDepth() is 1.0.
    // If we ever allow this to NOT be 1, we'll need to fix this up.

    const GfVec2d &min = _window.GetMin();
    const GfVec2d &max = _window.GetMax();

    // Construct the corner points in 3D as follows: construct a starting 
    // point by using the x and y coordinates of the reference plane and 
    // -1 as the z coordinate. Add the position of the frustum to generate 
    // the actual points in world-space coordinates.
    GfVec3d leftBottom = 
        _position + _rotation.TransformDir(GfVec3d(min[0], min[1], -1.0));
    GfVec3d rightTop = 
        _position + _rotation.TransformDir(GfVec3d(max[0], max[1], -1.0));

    // Now, transform the corner points by the given matrix
    leftBottom = matrix.Transform(leftBottom);
    rightTop   = matrix.Transform(rightTop);

    // Subtract the transformed frustum position from the transformed
    // corner points. Then, rotate the points using the rotation that would
    // transform the view direction vector back to (0, 0, -1). This brings 
    // the corner points from the world coordinate system into the local
    // frustum one.
    leftBottom -= frustum._position;
    rightTop   -= frustum._position;
    leftBottom = frustum._rotation.GetInverse().TransformDir(leftBottom);
    rightTop   = frustum._rotation.GetInverse().TransformDir(rightTop);

    // Finally, use the similar triangles trick to bring the corner
    // points back at one unit away from the point. These scaled x and
    // y coordinates can be directly used to construct the new
    // transformed reference plane.  Skip the scaling step for an
    // orthographic projection, though.
    if (_projectionType == Perspective) {
        leftBottom /= scale;
        rightTop   /= scale;
    }

    frustum._window.SetMin(GfVec2d(leftBottom[0], leftBottom[1]));
    frustum._window.SetMax(GfVec2d(rightTop[0],   rightTop[1]));

    // Note that negative scales in the transform have the potential
    // to flip the window.  Fix it if necessary.
    GfVec2d wMin = frustum._window.GetMin();
    GfVec2d wMax = frustum._window.GetMax();
    // Make sure left < right
    if ( wMin[0] > wMax[0] ) {
        std::swap( wMin[0], wMax[0] );
    }
    // Make sure bottom < top
    if ( wMin[1] > wMax[1] ) {
        std::swap( wMin[1], wMax[1] );
    }
    frustum._window.SetMin( wMin );
    frustum._window.SetMax( wMax );

    *this = frustum;

    return *this;
}
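A consistency sketch, assuming a rigid (rotation plus translation) transform: transforming the frustum and then taking its corners should match transforming the original corners directly.

// Sketch: Transform() agrees with transforming the frustum's corners.
static bool _FrustumTransformMatchesCorners(const GfFrustum &frustum,
                                            const GfMatrix4d &rigidXf)
{
    const std::vector<GfVec3d> before = frustum.ComputeCorners();

    GfFrustum transformed = frustum;
    transformed.Transform(rigidXf);
    const std::vector<GfVec3d> after = transformed.ComputeCorners();

    for (size_t i = 0; i < before.size(); ++i) {
        if (!GfIsClose(rigidXf.Transform(before[i]), after[i], 1e-6)) {
            return false;
        }
    }
    return true;
}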
Example #18
File: frustum.cpp Project: JT-a/USD
void
GfFrustum::_CalculateFrustumPlanes() const
{
    if (!_planes.empty())
        return;

    _planes.reserve(6);

    // These are values we need to construct the planes.
    const GfVec2d &winMin = _window.GetMin();
    const GfVec2d &winMax = _window.GetMax();
    double near           = _nearFar.GetMin();
    double far            = _nearFar.GetMax();
    GfMatrix4d m          = ComputeViewInverse();

    // For a perspective frustum, we use the viewpoint and four
    // corners of the near-plane frustum rectangle to define the 4
    // planes forming the left, right, top, and bottom sides of the
    // frustum.
    if (_projectionType == GfFrustum::Perspective) {

        //
        // Get the eye-space viewpoint (the origin) and the four corners
        // of the near-plane frustum rectangle using similar triangles.
        //
        // This picture may help:   
        //                 
        //                  top of near plane
        //                  frustum rectangle
        //
        //                  + --
        //                / |  | 
        //              /   |  |
        //            /     |  | h
        //          /       |  |
        //        /         |  |                 
        //   vp +-----------+ --               
        //                    center of near plane frustum rectangle
        //      |___________|
        //           near    
        //
        // The height (h) of this triangle is found by the following
        // equation, based on the definition of the _window member
        // variable, which is the size of the image rectangle in the
        // reference plane (a distance of 1 from the viewpoint):
        //
        //      h       _window.GetMax()[1]
        //    ------ = --------------------
        //     near             1
        //
        // Solving for h gets the height of the triangle. Doing
        // similar math for the other 3 sides of the near-plane
        // rectangle is left as an exercise for the reader.
        //
        // XXX Note: If we ever allow reference plane depth to be other 
        // than 1.0, we'll need to revisit this.

        GfVec3d vp(0.0, 0.0, 0.0);
        GfVec3d lb(near * winMin[0], near * winMin[1], -near);
        GfVec3d rb(near * winMax[0], near * winMin[1], -near);
        GfVec3d lt(near * winMin[0], near * winMax[1], -near);
        GfVec3d rt(near * winMax[0], near * winMax[1], -near);

        // Transform all 5 points into world space by the inverse of the
        // view matrix (which converts from world space to eye space).
        vp = m.Transform(vp);
        lb = m.Transform(lb);
        rb = m.Transform(rb);
        lt = m.Transform(lt);
        rt = m.Transform(rt);

        // Construct the 6 planes. The three points defining each plane
        // should obey the right-hand-rule; they should be in counter-clockwise 
        // order on the inside of the frustum. This makes the intersection of 
        // the half-spaces defined by the planes the contents of the frustum.
        _planes.push_back( GfPlane(vp, lb, lt) );     // Left
        _planes.push_back( GfPlane(vp, rt, rb) );     // Right
        _planes.push_back( GfPlane(vp, rb, lb) );     // Bottom
        _planes.push_back( GfPlane(vp, lt, rt) );     // Top
        _planes.push_back( GfPlane(rb, lb, lt) );     // Near
    }

    // For an orthographic projection, we need only the four corners
    // of the near-plane frustum rectangle and the view direction to
    // define the 4 planes forming the left, right, top, and bottom
    // sides of the frustum.
    else {

        //
        // The math here is much easier than in the perspective case,
        // because we have parallel lines instead of triangles. Just
        // use the size of the image rectangle in the reference plane,
        // which is the same in the near plane.
        //
        GfVec3d lb(winMin[0], winMin[1], -near);
        GfVec3d rb(winMax[0], winMin[1], -near);
        GfVec3d lt(winMin[0], winMax[1], -near);
        GfVec3d rt(winMax[0], winMax[1], -near);

        // Transform the 4 points into world space by the inverse of
        // the view matrix (which converts from world space to eye
        // space).
        lb = m.Transform(lb);
        rb = m.Transform(rb);
        lt = m.Transform(lt);
        rt = m.Transform(rt);

        // Transform the canonical view direction (-z axis) into world
        // space.
        GfVec3d dir = m.TransformDir(-GfVec3d::ZAxis());

        // Construct the 5 planes from these 4 points and the
        // eye-space view direction.
        _planes.push_back( GfPlane(lt + dir, lt, lb) );       // Left
        _planes.push_back( GfPlane(rb + dir, rb, rt) );       // Right
        _planes.push_back( GfPlane(lb + dir, lb, rb) );       // Bottom
        _planes.push_back( GfPlane(rt + dir, rt, lt) );       // Top
        _planes.push_back( GfPlane(rb, lb, lt) );             // Near
    }

    // The far plane is the opposite to the near plane. To compute the 
    // distance from the origin for the far plane, we take the distance 
    // for the near plane, add the difference between the far and the near 
    // and then negate that. We do the negation since the far plane
    // faces the opposite direction. A small drawing would help:
    //
    //                               far - near
    //                     /---------------------------\ *
    //
    //        |           |                             |
    //        |           |                             |
    //        |           |                             |
    //   <----|---->      |                             |
    // fnormal|nnormal    |                             |
    //        |           |                             |
    //                near plane                     far plane
    //
    //         \---------/ *
    //          ndistance
    //         
    //         \---------------------------------------/ *
    //                         fdistance
    //
    // So, fdistance = - (ndistance + (far - near))
    _planes.push_back(
        GfPlane(-_planes[4].GetNormal(), 
                -(_planes[4].GetDistanceFromOrigin() + (far - near))) );
}
Example #19
void
GlfSimpleLightingContext::BindUniformBlocks(GlfBindingMapPtr const &bindingMap)
{
    if (not _lightingUniformBlock)
        _lightingUniformBlock = GlfUniformBlock::New();
    if (not _shadowUniformBlock)
        _shadowUniformBlock = GlfUniformBlock::New();
    if (not _materialUniformBlock)
        _materialUniformBlock = GlfUniformBlock::New();

    bool shadowExists = false;
    if ((not _lightingUniformBlockValid or
         not _shadowUniformBlockValid) and _lights.size() > 0) {
        int numLights = GetNumLightsUsed();

        // 16byte aligned
        struct LightSource {
            float position[4];
            float ambient[4];
            float diffuse[4];
            float specular[4];
            float spotDirection[4];
            float spotCutoff;
            float spotFalloff;
            float padding[2];
            float attenuation[4];
            bool hasShadow;
            int32_t shadowIndex;
            int32_t padding2[2];
        };

        struct Lighting {
            int32_t useLighting;
            int32_t useColorMaterialDiffuse;
            int32_t padding[2];
            LightSource lightSource[0];
        };

        // 16byte aligned
        struct ShadowMatrix {
            float viewToShadowMatrix[16];
            float basis0[4];
            float basis1[4];
            float basis2[4];
            float bias;
            float padding[3];
        };

        struct Shadow {
            ShadowMatrix shadow[0];
        };

        size_t lightingSize = sizeof(Lighting) + sizeof(LightSource) * numLights;
        size_t shadowSize = sizeof(ShadowMatrix) * numLights;
        Lighting *lightingData = (Lighting *)alloca(lightingSize);
        Shadow *shadowData = (Shadow *)alloca(shadowSize);

        memset(shadowData, 0, shadowSize);
        memset(lightingData, 0, lightingSize);

        GfMatrix4d viewToWorldMatrix = _worldToViewMatrix.GetInverse();

        lightingData->useLighting = _useLighting;
        lightingData->useColorMaterialDiffuse = _useColorMaterialDiffuse;

        for (int i = 0; _useLighting and i < numLights; ++i) {
            GlfSimpleLight const &light = _lights[i];

            setVec4(lightingData->lightSource[i].position,
                    light.GetPosition() * _worldToViewMatrix);
            setVec4(lightingData->lightSource[i].diffuse, light.GetDiffuse());
            setVec4(lightingData->lightSource[i].ambient, light.GetAmbient());
            setVec4(lightingData->lightSource[i].specular, light.GetSpecular());
            setVec3(lightingData->lightSource[i].spotDirection,
                    _worldToViewMatrix.TransformDir(light.GetSpotDirection()));
            setVec3(lightingData->lightSource[i].attenuation,
                    light.GetAttenuation());
            lightingData->lightSource[i].spotCutoff = light.GetSpotCutoff();
            lightingData->lightSource[i].spotFalloff = light.GetSpotFalloff();
            lightingData->lightSource[i].hasShadow = light.HasShadow();

            if (lightingData->lightSource[i].hasShadow) {
                int shadowIndex = light.GetShadowIndex();
                lightingData->lightSource[i].shadowIndex = shadowIndex;

                GfMatrix4d viewToShadowMatrix = viewToWorldMatrix *
                    _shadows->GetWorldToShadowMatrix(shadowIndex);

                double invBlur = 1.0/(std::max(0.0001F, light.GetShadowBlur()));
                GfMatrix4d mat = viewToShadowMatrix.GetInverse();
                GfVec4f xVec = GfVec4f(mat.GetRow(0) * invBlur);
                GfVec4f yVec = GfVec4f(mat.GetRow(1) * invBlur);
                GfVec4f zVec = GfVec4f(mat.GetRow(2));

                shadowData->shadow[shadowIndex].bias = light.GetShadowBias();
                setMatrix(shadowData->shadow[shadowIndex].viewToShadowMatrix,
                          viewToShadowMatrix);
                setVec4(shadowData->shadow[shadowIndex].basis0, xVec);
                setVec4(shadowData->shadow[shadowIndex].basis1, yVec);
                setVec4(shadowData->shadow[shadowIndex].basis2, zVec);

                shadowExists = true;
            }
        }

        _lightingUniformBlock->Update(lightingData, lightingSize);
        _lightingUniformBlockValid = true;

        if (shadowExists) {
            _shadowUniformBlock->Update(shadowData, shadowSize);
            _shadowUniformBlockValid = true;
        }
    }

    _lightingUniformBlock->Bind(bindingMap, _tokens->lightingUB);

    if (shadowExists) {
        _shadowUniformBlock->Bind(bindingMap, _tokens->shadowUB);
    }

    if (not _materialUniformBlockValid) {
        // Has to match the definition in simpleLightingShader.glslfx.
        struct Material {
            float ambient[4];
            float diffuse[4];
            float specular[4];
            float emission[4];
            float sceneColor[4];  // XXX: should be separated?
            float shininess;
            float padding[3];
        } materialData;

        memset(&materialData, 0, sizeof(materialData));

        setVec4(materialData.ambient, _material.GetAmbient());
        setVec4(materialData.diffuse, _material.GetDiffuse());
        setVec4(materialData.specular, _material.GetSpecular());
        setVec4(materialData.emission, _material.GetEmission());
        materialData.shininess = _material.GetShininess();
        setVec4(materialData.sceneColor, _sceneAmbient);

        _materialUniformBlock->Update(&materialData, sizeof(materialData));
        _materialUniformBlockValid = true;
    }

    _materialUniformBlock->Bind(bindingMap, _tokens->materialUB);
}
Example #20
File: rgb.cpp Project: 400dama/USD
GfRGB GfRGB::Transform(const GfMatrix4d &m) const
{
    return GfRGB(m.TransformDir(_rgb));
}
Example #21
/* static */
GT_DataArrayHandle
GusdPrimWrapper::convertPrimvarData( const UsdGeomPrimvar& primvar, UsdTimeCode time ) {

    SdfValueTypeName typeName = primvar.GetTypeName();
    if( typeName == SdfValueTypeNames->Int )
    {
        int usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Int32Array( &usdVal, 1, 1 );
    }
    else if( typeName == SdfValueTypeNames->Int64 )
    {
        int64_t usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Int64Array( &usdVal, 1, 1 );
    }
    else if( typeName == SdfValueTypeNames->Float )
    {
        float usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real32Array( &usdVal, 1, 1 );
    }
    else if( typeName == SdfValueTypeNames->Double )
    {
        double usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( &usdVal, 1, 1 );
    }
    else if( typeName == SdfValueTypeNames->Float3 )
    {
        GfVec3f usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real32Array( usdVal.data(), 1, 3 );
    }
    else if( typeName == SdfValueTypeNames->Double3 )
    {
        GfVec3d usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( usdVal.data(), 1, 3 );
    }
    else if( typeName == SdfValueTypeNames->Color3f )
    {
        GfVec3f usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real32Array( usdVal.data(), 1, 3, GT_TYPE_COLOR );
    }
    else if( typeName == SdfValueTypeNames->Color3d )
    {
        GfVec3d usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( usdVal.data(), 1, 3, GT_TYPE_COLOR );
    }
    else if( typeName == SdfValueTypeNames->Normal3f )
    {
        GfVec3f usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real32Array( usdVal.data(), 1, 3, GT_TYPE_NORMAL );
    }
    else if( typeName == SdfValueTypeNames->Normal3d )
    {
        GfVec3d usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( usdVal.data(), 1, 3, GT_TYPE_NORMAL );
    }
    else if( typeName == SdfValueTypeNames->Point3f )
    {
        GfVec3f usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real32Array( usdVal.data(), 1, 3, GT_TYPE_POINT );
    }
    else if( typeName == SdfValueTypeNames->Point3d )
    {
        GfVec3d usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( usdVal.data(), 1, 3, GT_TYPE_POINT );
    }
    else if( typeName == SdfValueTypeNames->Float4 )
    {
        GfVec4f usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real32Array( usdVal.data(), 1, 4 );
    }
    else if( typeName == SdfValueTypeNames->Double4 )
    {
        GfVec4d usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( usdVal.data(), 1, 4 );
    }
    else if( typeName == SdfValueTypeNames->Quatf )
    {
        GfVec4f usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real32Array( usdVal.data(), 1, 4, GT_TYPE_QUATERNION );
    }
    else if( typeName == SdfValueTypeNames->Quatd )
    {
        GfVec4d usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( usdVal.data(), 1, 4, GT_TYPE_QUATERNION );
    }
    else if( typeName == SdfValueTypeNames->Matrix3d )
    {
        GfMatrix3d usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( usdVal.GetArray(), 1, 9, GT_TYPE_MATRIX3 );
    }
    else if( typeName == SdfValueTypeNames->Matrix4d ||
             typeName == SdfValueTypeNames->Frame4d )
    {
        GfMatrix4d usdVal;
        primvar.Get( &usdVal, time );

        return new GT_Real64Array( usdVal.GetArray(), 1, 16, GT_TYPE_MATRIX );
    }
    else if( typeName == SdfValueTypeNames->String )
    {
        string usdVal;
        primvar.Get( &usdVal, time );

        auto     gtString = new GT_DAIndexedString( 1 );
        gtString->setString( 0, 0, usdVal.c_str() );
        return gtString;
    }
    else if( typeName == SdfValueTypeNames->StringArray )
    {
        VtArray<string> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        auto gtString = new GT_DAIndexedString( usdVal.size() );
        for( size_t i = 0; i < usdVal.size(); ++i )
            gtString->setString( i, 0, usdVal[i].c_str() );
        return gtString;
    }
    else if( typeName == SdfValueTypeNames->IntArray )
    {
        VtArray<int> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<int>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->Int64Array )
    {
        VtArray<int64_t> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<int64_t>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->FloatArray )
    {
        VtArray<float> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<float>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->DoubleArray )
    {
        VtArray<double> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<double>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->Float2Array )
    {
        VtArray<GfVec2f> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec2f>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->Double2Array )
    {
        VtArray<GfVec2d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec2d>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->Float3Array )
    {
        VtArray<GfVec3f> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3f>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->Double3Array )
    {
        VtArray<GfVec3d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3d>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->Color3fArray )
    {
        VtArray<GfVec3f> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3f>(usdVal,GT_TYPE_COLOR);
    }
    else if( typeName == SdfValueTypeNames->Color3dArray )
    {
        VtArray<GfVec3d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3d>(usdVal,GT_TYPE_COLOR);
    }
    else if( typeName == SdfValueTypeNames->Vector3fArray )
    {
        VtArray<GfVec3f> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3f>(usdVal, GT_TYPE_VECTOR);
    }
    else if( typeName == SdfValueTypeNames->Vector3dArray )
    {
        VtArray<GfVec3d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3d>(usdVal, GT_TYPE_VECTOR);
    }
    else if( typeName == SdfValueTypeNames->Normal3fArray )
    {
        VtArray<GfVec3f> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3f>(usdVal, GT_TYPE_NORMAL);
    }
    else if( typeName == SdfValueTypeNames->Normal3dArray )
    {
        VtArray<GfVec3d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3d>(usdVal, GT_TYPE_NORMAL);
    }
    else if( typeName == SdfValueTypeNames->Point3fArray )
    {
        VtArray<GfVec3f> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3f>(usdVal, GT_TYPE_POINT);
    }
    else if( typeName == SdfValueTypeNames->Point3dArray )
    {
        VtArray<GfVec3d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec3d>(usdVal, GT_TYPE_POINT);
    }
    else if( typeName == SdfValueTypeNames->Float4Array )
    {
        VtArray<GfVec4f> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec4f>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->Double4Array )
    {
        VtArray<GfVec4d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec4d>(usdVal);
    }
    else if( typeName == SdfValueTypeNames->QuatfArray )
    {
        VtArray<GfVec4f> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec4f>(usdVal, GT_TYPE_QUATERNION);
    }
    else if( typeName == SdfValueTypeNames->QuatdArray )
    {
        VtArray<GfVec4d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfVec4d>(usdVal, GT_TYPE_QUATERNION);
    }
    else if( typeName == SdfValueTypeNames->Matrix3dArray )
    {
        VtArray<GfMatrix3d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfMatrix3d>(usdVal, GT_TYPE_MATRIX3);
    }
    else if( typeName == SdfValueTypeNames->Matrix4dArray ||
             typeName == SdfValueTypeNames->Frame4dArray )
    {
        VtArray<GfMatrix4d> usdVal;
        primvar.ComputeFlattened( &usdVal, time );
        return new GusdGT_VtArray<GfMatrix4d>(usdVal, GT_TYPE_MATRIX);
    }
    return NULL;
}
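
The chain above keys on the primvar's SdfValueTypeName, flattens the value at the requested time, and wraps it in a GusdGT_VtArray of the matching element type, tagging colors, vectors, normals, points, quaternions and matrices with the corresponding GT type info. A minimal usage sketch follows, assuming a reasonably current USD build with UsdGeomPrimvarsAPI and assuming the chain is exposed as a free function named convertPrimvarToGT(); that name and the helper around it are hypothetical, not part of the snippet.

// Sketch only: convertPrimvarToGT() stands in for the type dispatch above,
// returning a GT array handle, or a null handle for unsupported value types.
#include "pxr/usd/usdGeom/primvar.h"
#include "pxr/usd/usdGeom/primvarsAPI.h"
#include <GT/GT_Handles.h>   // GT_DataArrayHandle (Houdini HDK)

PXR_NAMESPACE_USING_DIRECTIVE

// Hypothetical wrapper around the SdfValueTypeName dispatch shown above.
GT_DataArrayHandle convertPrimvarToGT(const UsdGeomPrimvar& primvar,
                                      UsdTimeCode time);

void convertPrimvars(const UsdPrim& prim, UsdTimeCode time)
{
    for (const UsdGeomPrimvar& pv : UsdGeomPrimvarsAPI(prim).GetPrimvars()) {
        if (!pv.HasValue())
            continue;                       // no value to flatten
        GT_DataArrayHandle data = convertPrimvarToGT(pv, time);
        if (!data)
            continue;                       // value type not handled above
        // Hand 'data' to the GT consumer, e.g. keyed by pv.GetBaseName().
    }
}

Unsupported value types fall out of the chain as NULL, so a caller can simply skip them, as the sketch does.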
Exemplo n.º 22
0
void Xform::updateSample(Time t_)
{
    super::updateSample(t_);
    if (m_update_flag.bits == 0) {
        m_sample.flags = (m_sample.flags & ~(int)XformData::Flags::UpdatedMask);
        return;
    }
    if (m_update_flag.variant_set_changed) { m_summary_needs_update = true; }

    auto t = UsdTimeCode(t_);
    const auto& conf = getImportSettings();

    auto& sample = m_sample;
    auto prev = sample;

    if (m_summary.type == XformSummary::Type::TRS) {
        auto translate  = float3::zero();
        auto scale      = float3::one();
        auto rotation   = quatf::identity();

        for (auto& op : m_read_ops) {
            switch (op.GetOpType()) {
            case UsdGeomXformOp::TypeTranslate:
            {
                float3 tmp;
                op.GetAs((GfVec3f*)&tmp, t);
                translate += tmp;
                break;
            }
            case UsdGeomXformOp::TypeScale:
            {
                float3 tmp;
                op.GetAs((GfVec3f*)&tmp, t);
                scale *= tmp;
                break;
            }
            case UsdGeomXformOp::TypeOrient:
            {
                quatf tmp;
                op.GetAs((GfQuatf*)&tmp, t);
                rotation *= tmp;
                break;
            }
            case UsdGeomXformOp::TypeRotateX:
            {
                float angle;
                op.GetAs(&angle, t);
                rotation *= rotateX(angle * Deg2Rad);
                break;
            }
            case UsdGeomXformOp::TypeRotateY:
            {
                float angle;
                op.GetAs(&angle, t);
                rotation *= rotateY(angle * Deg2Rad);
                break;
            }
            case UsdGeomXformOp::TypeRotateZ:
            {
                float angle;
                op.GetAs(&angle, t);
                rotation *= rotateZ(angle * Deg2Rad);
                break;
            }
            case UsdGeomXformOp::TypeRotateXYZ:
            case UsdGeomXformOp::TypeRotateXZY:
            case UsdGeomXformOp::TypeRotateYXZ:
            case UsdGeomXformOp::TypeRotateYZX:
            case UsdGeomXformOp::TypeRotateZXY:
            case UsdGeomXformOp::TypeRotateZYX:
            {
                float3 euler;
                op.GetAs((GfVec3f*)&euler, t);
                rotation *= EulerToQuaternion(euler * Deg2Rad, op.GetOpType());
                break;
            }
            default:
                break;
            }
        }

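        // Optionally flip handedness per the import settings: mirror the X
        // translation and swap the handedness of the accumulated rotation.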
        if (conf.swap_handedness) {
            translate.x *= -1.0f;
            rotation = swap_handedness(rotation);
        }
        sample.position = translate;
        sample.rotation = rotation;
        sample.scale = scale;
    }
    else {
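        // Generic path: multiply the individual op transforms into a single
        // local matrix, then factor it back into TRS with GfTransform.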
        GfMatrix4d result;
        result.SetIdentity();
        for (auto& op : m_read_ops) {
            auto m = op.GetOpTransform(t);
            result = m * result;
        }

        GfTransform gft;
        gft.SetMatrix(result);

        (GfMatrix4f&)sample.transform = GfMatrix4f(result);
        (GfVec3f&)sample.position = GfVec3f(gft.GetTranslation());
        (GfQuatf&)sample.rotation = GfQuatf(gft.GetRotation().GetQuat());
        (GfVec3f&)sample.scale = GfVec3f(gft.GetScale());
    }

    int update_flags = 0;
    if (!near_equal(prev.position, sample.position)) {
        update_flags |= (int)XformData::Flags::UpdatedPosition;
    }
    if (!near_equal(prev.rotation, sample.rotation)) {
        update_flags |= (int)XformData::Flags::UpdatedRotation;
    }
    if (!near_equal(prev.scale, sample.scale)) {
        update_flags |= (int)XformData::Flags::UpdatedScale;
    }
    sample.flags = (sample.flags & ~(int)XformData::Flags::UpdatedMask) | update_flags;
}
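
For comparison, the composed local transform that the matrix branch builds by multiplying GetOpTransform() results can also be obtained from stock USD API: UsdGeomXformable::GetLocalTransformation() composes the authored xform ops, and GfTransform factors the result into translate / rotate / scale the same way the example does. A minimal sketch follows, using only the pxr core libraries; the helper name GetLocalTRS is made up for illustration.

#include "pxr/usd/usdGeom/xformable.h"
#include "pxr/base/gf/matrix4d.h"
#include "pxr/base/gf/transform.h"
#include "pxr/base/gf/quatd.h"

PXR_NAMESPACE_USING_DIRECTIVE

// Hypothetical helper: compose a prim's xform ops into one local matrix and
// decompose it into TRS. Returns false if the ops could not be evaluated.
bool GetLocalTRS(const UsdGeomXformable& xformable, UsdTimeCode time,
                 GfVec3d* translation, GfQuatd* rotation, GfVec3d* scale)
{
    GfMatrix4d local;
    local.SetIdentity();
    bool resetsXformStack = false;
    if (!xformable.GetLocalTransformation(&local, &resetsXformStack, time)) {
        return false;
    }

    // Same factorization as the matrix branch above.
    GfTransform xf;
    xf.SetMatrix(local);
    *translation = xf.GetTranslation();
    *rotation = xf.GetRotation().GetQuat();
    *scale = xf.GetScale();
    return true;
}

Note that factoring a composed matrix can disagree with the accumulating TRS branch when the op stack interleaves rotations with non-uniform scales or several translates, which is presumably why the snippet keeps a dedicated TRS path.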
Exemplo n.º 23
0
Arquivo: rprim.cpp Projeto: MWDD/USD
void
HdRprim::_PopulateConstantPrimVars(HdSceneDelegate* delegate,
                                   HdDrawItem *drawItem,
                                   HdDirtyBits *dirtyBits)
{
    HD_TRACE_FUNCTION();
    HF_MALLOC_TAG_FUNCTION();

    SdfPath const& id = GetId();
    HdRenderIndex &renderIndex = delegate->GetRenderIndex();
    HdResourceRegistry *resourceRegistry = &HdResourceRegistry::GetInstance();


    // XXX: this should be in a different method
    // XXX: This should be in HdSt getting the HdSt Shader
    const HdShader *shader = static_cast<const HdShader *>(
                                  renderIndex.GetSprim(HdPrimTypeTokens->shader,
                                                       _surfaceShaderID));

    if (shader == nullptr) {
        shader = static_cast<const HdShader *>(
                        renderIndex.GetFallbackSprim(HdPrimTypeTokens->shader));
    }

    _sharedData.surfaceShader = shader->GetShaderCode();


    // update uniforms
    HdBufferSourceVector sources;
    if (HdChangeTracker::IsTransformDirty(*dirtyBits, id)) {
        GfMatrix4d transform = delegate->GetTransform(id);
        _sharedData.bounds.SetMatrix(transform); // for CPU frustum culling

        HdBufferSourceSharedPtr source(new HdVtBufferSource(
                                           HdTokens->transform,
                                           transform));
        sources.push_back(source);
        source.reset(new HdVtBufferSource(HdTokens->transformInverse,
                                          transform.GetInverse()));
        sources.push_back(source);

        // if this is a prototype (has instancer),
        // also push the instancer transform separately.
        if (!_instancerID.IsEmpty()) {
            // gather all instancer transforms in the instancing hierarchy
            VtMatrix4dArray rootTransforms = _GetInstancerTransforms(delegate);
            VtMatrix4dArray rootInverseTransforms(rootTransforms.size());
            bool leftHanded = transform.IsLeftHanded();
            for (size_t i = 0; i < rootTransforms.size(); ++i) {
                rootInverseTransforms[i] = rootTransforms[i].GetInverse();
                // flip the handedness if necessary
                leftHanded ^= rootTransforms[i].IsLeftHanded();
            }

            source.reset(new HdVtBufferSource(
                             HdTokens->instancerTransform,
                             rootTransforms, /*staticArray=*/true));
            sources.push_back(source);
            source.reset(new HdVtBufferSource(
                             HdTokens->instancerTransformInverse,
                             rootInverseTransforms, /*staticArray=*/true));
            sources.push_back(source);

            // XXX: It might be worth considering isFlipped for
            // non-instanced prims as well. It can improve drawing
            // performance on older GPUs by reducing fragment shader
            // cost, although it needs more GPU memory.

            // set as int (GLSL needs 32-bit align for bool)
            source.reset(new HdVtBufferSource(
                             HdTokens->isFlipped, VtValue(int(leftHanded))));
            sources.push_back(source);
        }
    }
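    // Update the CPU culling bounds and push the local bbox min/max
    // (packed into vec4s with w = 0) as constant buffer sources.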
    if (HdChangeTracker::IsExtentDirty(*dirtyBits, id)) {
        _sharedData.bounds.SetRange(GetExtent(delegate));

        GfVec3d const & localMin = drawItem->GetBounds().GetBox().GetMin();
        HdBufferSourceSharedPtr sourceMin(new HdVtBufferSource(
                                           HdTokens->bboxLocalMin,
                                           VtValue(GfVec4f(
                                               localMin[0],
                                               localMin[1],
                                               localMin[2], 0))));
        sources.push_back(sourceMin);

        GfVec3d const & localMax = drawItem->GetBounds().GetBox().GetMax();
        HdBufferSourceSharedPtr sourceMax(new HdVtBufferSource(
                                           HdTokens->bboxLocalMax,
                                           VtValue(GfVec4f(
                                               localMax[0],
                                               localMax[1],
                                               localMax[2], 0))));
        sources.push_back(sourceMax);
    }

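    // Upload the prim ID as a constant so ID rendering / picking can
    // identify this rprim.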
    if (HdChangeTracker::IsPrimIdDirty(*dirtyBits, id)) {
        int32_t primId = GetPrimId();
        HdBufferSourceSharedPtr source(new HdVtBufferSource(
                                           HdTokens->primID,
                                           VtValue(primId)));
        sources.push_back(source);
    }

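    // Gather dirty constant-rate primvars and append them as buffer sources,
    // skipping values Hydra cannot upload (e.g. std::string).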
    if (HdChangeTracker::IsAnyPrimVarDirty(*dirtyBits, id)) {
        TfTokenVector primVarNames = delegate->GetPrimVarConstantNames(id);
        sources.reserve(sources.size()+primVarNames.size());
        TF_FOR_ALL(nameIt, primVarNames) {
            if (HdChangeTracker::IsPrimVarDirty(*dirtyBits, id, *nameIt)) {
                VtValue value = delegate->Get(id, *nameIt);

                // XXX Hydra doesn't support string primvar yet
                if (value.IsHolding<std::string>()) continue;

                if (!value.IsEmpty()) {
                    HdBufferSourceSharedPtr source(
                        new HdVtBufferSource(*nameIt, value));

                    // if it's an unacceptable type, skip it (e.g. std::string)
                    if (source->GetNumComponents() > 0) {
                        sources.push_back(source);
                    }
                }
            }
        }
    }
void
My_TestGLDrawing::DrawTest(bool offscreen)
{
    std::cout << "My_TestGLDrawing::DrawTest()\n";

    HdPerfLog& perfLog = HdPerfLog::GetInstance();
    perfLog.Enable();
    
    // Reset all counters we care about.
    perfLog.ResetCache(HdTokens->extent);
    perfLog.ResetCache(HdTokens->points);
    perfLog.ResetCache(HdTokens->topology);
    perfLog.ResetCache(HdTokens->transform);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingExtent, 0);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingPrimvar, 0);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingTopology, 0);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingVisibility, 0);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingXform, 0);

    int width = GetWidth(), height = GetHeight();

    double aspectRatio = double(width)/height;
    GfFrustum frustum;
    frustum.SetPerspective(60.0, aspectRatio, 1, 100000.0);

    GfMatrix4d viewMatrix;
    viewMatrix.SetIdentity();
    viewMatrix *= GfMatrix4d().SetRotate(GfRotation(GfVec3d(0, 1, 0), _rotate[0]));
    viewMatrix *= GfMatrix4d().SetRotate(GfRotation(GfVec3d(1, 0, 0), _rotate[1]));
    viewMatrix *= GfMatrix4d().SetTranslate(GfVec3d(_translate[0], _translate[1], _translate[2]));

    GfMatrix4d projMatrix = frustum.ComputeProjectionMatrix();

    GfMatrix4d modelViewMatrix = viewMatrix; 
    if (UsdGeomGetStageUpAxis(_stage) == UsdGeomTokens->z) {
        // rotate from z-up to y-up
        modelViewMatrix = 
            GfMatrix4d().SetRotate(GfRotation(GfVec3d(1.0,0.0,0.0), -90.0)) *
            modelViewMatrix;
    }

    GfVec4d viewport(0, 0, width, height);
    _engine->SetCameraState(modelViewMatrix, projMatrix, viewport);

    size_t i = 0;
    TF_FOR_ALL(timeIt, GetTimes()) {
        UsdTimeCode time = *timeIt;
        if (*timeIt == -999) {
            time = UsdTimeCode::Default();
        }
        UsdImagingGLRenderParams params;
        params.drawMode = GetDrawMode();
        params.enableLighting = IsEnabledTestLighting();
        params.enableIdRender = IsEnabledIdRender();
        params.frame = time;
        params.complexity = _GetComplexity();
        params.cullStyle = IsEnabledCullBackfaces() ?
                            UsdImagingGLCullStyle::CULL_STYLE_BACK :
                            UsdImagingGLCullStyle::CULL_STYLE_NOTHING;

        glViewport(0, 0, width, height);

        glEnable(GL_DEPTH_TEST);

        if(IsEnabledTestLighting()) {
            if(UsdImagingGLEngine::IsHydraEnabled()) {
                _engine->SetLightingState(_lightingContext);
            } else {
                _engine->SetLightingStateFromOpenGL();
            }
        }

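        // Forward any user clip planes to the render params and enable the
        // matching fixed-function GL clip planes.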
        if (!GetClipPlanes().empty()) {
            params.clipPlanes = GetClipPlanes();
            for (size_t c = 0; c < GetClipPlanes().size(); ++c) {
                glEnable(GL_CLIP_PLANE0 + c);
            }
        }

        GfVec4f const &clearColor = GetClearColor();
        GLfloat clearDepth[1] = { 1.0f };

        // Make sure we render to convergence.
        TfErrorMark mark;
        do {
            glClearBufferfv(GL_COLOR, 0, clearColor.data());
            glClearBufferfv(GL_DEPTH, 0, clearDepth);
            _engine->Render(_stage->GetPseudoRoot(), params);
        } while (!_engine->IsConverged());
        TF_VERIFY(mark.IsClean(), "Errors occurred while rendering!");

        std::cout << "itemsDrawn " << perfLog.GetCounter(HdTokens->itemsDrawn) << std::endl;
        std::cout << "totalItemCount " << perfLog.GetCounter(HdTokens->totalItemCount) << std::endl;

        std::string imageFilePath = GetOutputFilePath();
        if (!imageFilePath.empty()) {
            if (time != UsdTimeCode::Default()) {
                std::stringstream suffix;
                suffix << "_" << std::setw(3) << std::setfill('0') << params.frame << ".png";
                imageFilePath = TfStringReplace(imageFilePath, ".png", suffix.str());
            }
            std::cout << imageFilePath << "\n";
            WriteToFile("color", imageFilePath);
        }
        i++;
    }