// Gather RenderMan ("Ri") attributes authored on `prim` via UsdRiStatements
// and set them on `attrsBuilder` as Katana attributes, sampled at
// `currentTime`. Returns true if at least one attribute was gathered.
// NOTE(review): this chunk ends before the function's closing brace — the
// `return hasAttrs;` presumably follows outside the visible text; confirm
// against the full file.
static bool _GatherRibAttributes(
        const UsdPrim &prim,
        double currentTime,
        FnKat::GroupBuilder& attrsBuilder)
{
    bool hasAttrs = false;

    // USD SHADING STYLE ATTRIBUTES
    UsdRiStatements riStatements(prim);
    if (riStatements) {
        const std::vector<UsdProperty> props =
            riStatements.GetRiAttributes();
        std::string attrName;
        TF_FOR_ALL(propItr, props) {
            UsdProperty prop = *propItr;
            if (!prop) continue;

            // Katana attribute name: the Ri namespace with ':' replaced by
            // '.', plus a trailing '.', plus the bare attribute name —
            // e.g. namespace "user" + name "foo" -> "user.foo".
            std::string nameSpace =
                riStatements.GetRiAttributeNameSpace(prop).GetString();
            nameSpace = TfStringReplace(nameSpace, ":", ".") + ".";

            attrName = nameSpace +
                riStatements.GetRiAttributeName(prop).GetString();

            VtValue vtValue;
            UsdAttribute usdAttr = prim.GetAttribute(prop.GetName());
            if (usdAttr) {
                // Skip attributes that have no resolvable value at this time.
                if (not usdAttr.Get(&vtValue, currentTime))
                    continue;

                // XXX asShaderParam really means:
                // "For arrays, as a single attr vs a type/value pair group"
                // The type/value pair group is meaningful for attrs who don't
                // have a formal type definition -- like a "user" RiAttribute.
                //
                // However, other array values (such as two-element shadingrate)
                // are not expecting the type/value pair form and will not
                // generate rib correctly. As such, we'll handle the "user"
                // attribute as a special case.
                bool asShaderParam = true;

                if (nameSpace == "user.") {
                    asShaderParam = false;
                }

                attrsBuilder.set(attrName,
                    PxrUsdKatanaUtils::ConvertVtValueToKatAttr(vtValue,
                        asShaderParam) );
            }
            else {
                // Not an attribute: treat the property as a relationship and
                // convert its targets instead.
                UsdRelationship usdRel = prim.GetRelationship(prop.GetName());
                attrsBuilder.set(attrName,
                    PxrUsdKatanaUtils::ConvertRelTargetsToKatAttr(usdRel,
                        /* asShaderParam */ false) );
            }
            hasAttrs = true;
        }
    }
void testPaths(char const *paths[], int expect) { Sdf_PathParserContext context; // Initialize the scanner, allowing it to be reentrant. SdfPathYylex_init(&context.scanner); while(*paths) { printf("testing: %s\n", *paths); SdfPathYy_scan_string(*paths, context.scanner); int result = SdfPathYyparse(&context); // Report parse errors. if (result != expect) { fprintf(stderr, "parse error: %s in %s\n", context.errStr.c_str(), *paths); TF_AXIOM(result == expect); } // Report mismatches between original string and the string // representation of the parsed path. We allow whitespace to // be different. if (result == 0) { std::string s = context.node->GetPathToken().GetString(); if (s != TfStringReplace(*paths, " ", "")) { fprintf(stderr, "mismatch: %s -> %s\n", *paths, s.c_str()); TF_AXIOM(s == *paths); } } ++paths; } // Clean up. SdfPathYylex_destroy(context.scanner); }
// Render the stage once per test time sample, checking convergence and
// optionally writing each frame to an image file. Also resets/prints the
// Hydra perf counters this test cares about.
// NOTE(review): `offscreen` is not referenced anywhere in the visible body —
// confirm whether it is consumed past this chunk. The function's closing
// brace is also outside the visible text.
void My_TestGLDrawing::DrawTest(bool offscreen)
{
    std::cout << "My_TestGLDrawing::DrawTest()\n";

    HdPerfLog& perfLog = HdPerfLog::GetInstance();
    perfLog.Enable();

    // Reset all counters we care about.
    perfLog.ResetCache(HdTokens->extent);
    perfLog.ResetCache(HdTokens->points);
    perfLog.ResetCache(HdTokens->topology);
    perfLog.ResetCache(HdTokens->transform);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingExtent, 0);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingPrimvar, 0);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingTopology, 0);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingVisibility, 0);
    perfLog.SetCounter(UsdImagingTokens->usdVaryingXform, 0);

    // Build a perspective camera from the test's rotate/translate state.
    int width = GetWidth(), height = GetHeight();
    double aspectRatio = double(width)/height;

    GfFrustum frustum;
    frustum.SetPerspective(60.0, aspectRatio, 1, 100000.0);

    GfMatrix4d viewMatrix;
    viewMatrix.SetIdentity();
    viewMatrix *= GfMatrix4d().SetRotate(
        GfRotation(GfVec3d(0, 1, 0), _rotate[0]));
    viewMatrix *= GfMatrix4d().SetRotate(
        GfRotation(GfVec3d(1, 0, 0), _rotate[1]));
    viewMatrix *= GfMatrix4d().SetTranslate(
        GfVec3d(_translate[0], _translate[1], _translate[2]));

    GfMatrix4d projMatrix = frustum.ComputeProjectionMatrix();
    GfMatrix4d modelViewMatrix = viewMatrix;

    if (UsdGeomGetStageUpAxis(_stage) == UsdGeomTokens->z) {
        // rotate from z-up to y-up
        modelViewMatrix =
            GfMatrix4d().SetRotate(GfRotation(GfVec3d(1.0,0.0,0.0), -90.0)) *
            modelViewMatrix;
    }

    GfVec4d viewport(0, 0, width, height);
    _engine->SetCameraState(modelViewMatrix, projMatrix, viewport);

    // Frame counter across time samples (only used as a loop counter in the
    // visible code).
    size_t i = 0;
    TF_FOR_ALL(timeIt, GetTimes()) {
        UsdTimeCode time = *timeIt;
        // -999 is this test's sentinel for "use the default time code".
        if (*timeIt == -999) {
            time = UsdTimeCode::Default();
        }

        UsdImagingGLRenderParams params;
        params.drawMode = GetDrawMode();
        params.enableLighting = IsEnabledTestLighting();
        params.enableIdRender = IsEnabledIdRender();
        params.frame = time;
        params.complexity = _GetComplexity();
        params.cullStyle = IsEnabledCullBackfaces() ?
            UsdImagingGLCullStyle::CULL_STYLE_BACK :
            UsdImagingGLCullStyle::CULL_STYLE_NOTHING;

        glViewport(0, 0, width, height);
        glEnable(GL_DEPTH_TEST);

        if(IsEnabledTestLighting()) {
            // Hydra takes lighting through the context; the legacy path reads
            // it back from the current GL fixed-function state.
            if(UsdImagingGLEngine::IsHydraEnabled()) {
                _engine->SetLightingState(_lightingContext);
            } else {
                _engine->SetLightingStateFromOpenGL();
            }
        }

        if (!GetClipPlanes().empty()) {
            params.clipPlanes = GetClipPlanes();
            // NOTE(review): this inner `i` shadows the outer frame counter,
            // and the planes are never glDisable'd in the visible code —
            // presumably fine for a one-shot test; confirm.
            for (size_t i=0; i<GetClipPlanes().size(); ++i) {
                glEnable(GL_CLIP_PLANE0 + i);
            }
        }

        GfVec4f const &clearColor = GetClearColor();
        GLfloat clearDepth[1] = { 1.0f };

        // Make sure we render to convergence.
        TfErrorMark mark;
        do {
            glClearBufferfv(GL_COLOR, 0, clearColor.data());
            glClearBufferfv(GL_DEPTH, 0, clearDepth);
            _engine->Render(_stage->GetPseudoRoot(), params);
        } while (!_engine->IsConverged());
        TF_VERIFY(mark.IsClean(), "Errors occurred while rendering!");

        std::cout << "itemsDrawn "
                  << perfLog.GetCounter(HdTokens->itemsDrawn) << std::endl;
        std::cout << "totalItemCount "
                  << perfLog.GetCounter(HdTokens->totalItemCount) << std::endl;

        std::string imageFilePath = GetOutputFilePath();
        if (!imageFilePath.empty()) {
            // Per-frame output: splice a zero-padded "_NNN" suffix into the
            // ".png" file name for non-default time codes.
            if (time != UsdTimeCode::Default()) {
                std::stringstream suffix;
                suffix << "_" << std::setw(3) << std::setfill('0')
                       << params.frame << ".png";
                imageFilePath =
                    TfStringReplace(imageFilePath, ".png", suffix.str());
            }
            std::cout << imageFilePath << "\n";
            WriteToFile("color", imageFilePath);
        }

        i++;
    }