static bool _CheckUsdTypeAndResizeArrays( const UsdAttribute& usdAttr, const TfType& expectedType, const GfInterval& timeInterval, std::vector<double>* timeSamples, MTimeArray* timeArray, MDoubleArray* valueArray) { // Validate that the attribute holds values of the expected type. const TfType type = usdAttr.GetTypeName().GetType(); if (type != expectedType) { TF_CODING_ERROR("Unsupported type name for USD attribute '%s': %s", usdAttr.GetName().GetText(), type.GetTypeName().c_str()); return false; } if (!usdAttr.GetTimeSamplesInInterval(timeInterval, timeSamples)) { return false; } size_t numTimeSamples = timeSamples->size(); if (numTimeSamples < 1) { return false; } timeArray->setLength(numTimeSamples); valueArray->setLength(numTimeSamples); return true; }
// Computes the motion sample times to use for \p attr, expressed relative to
// _currentTime (so a result of {0.0} means "no motion").
//
// Resolution order:
//   1. If the attribute is valid but not varying at _currentTime, or fewer
//      than two fallback times exist, return no motion.
//   2. If _motionSampleTimesOverride is non-empty, return it verbatim.
//   3. Otherwise derive times from the attribute's authored samples across
//      the shutter interval, extending to bracketing samples just outside
//      the interval when needed; fall back to _motionSampleTimesFallback
//      when samples can't be fetched.
const std::vector<double> PxrUsdKatanaUsdInPrivateData::GetMotionSampleTimes(
    const UsdAttribute& attr) const
{
    // Shared "no motion" result: a single sample at the current frame.
    static std::vector<double> noMotion = {0.0};

    // A non-varying attribute needs no motion samples; likewise, with fewer
    // than two fallback times there is no meaningful multi-sample motion.
    if ((attr && !PxrUsdKatanaUtils::IsAttributeVarying(attr, _currentTime)) ||
            _motionSampleTimesFallback.size() < 2)
    {
        return noMotion;
    }

    // If an override was explicitly specified for this prim, return it.
    //
    if (_motionSampleTimesOverride.size() > 0)
    {
        return _motionSampleTimesOverride;
    }

    //
    // Otherwise, try computing motion sample times. If they can't be computed,
    // fall back on the parent data's times.
    //

    // Early exit if we don't have a valid attribute.
    //
    if (!attr)
    {
        return _motionSampleTimesFallback;
    }

    // Allowable error in sample time comparison.
    static const double epsilon = 0.0001;

    double shutterStartTime, shutterCloseTime;

    // Calculate shutter start and close times based on
    // the direction of motion blur.
    if (IsMotionBackward())
    {
        // Backward motion: the shutter interval sits behind the current time.
        shutterStartTime = _currentTime - _shutterClose;
        shutterCloseTime = _currentTime - _shutterOpen;
    }
    else
    {
        shutterStartTime = _currentTime + _shutterOpen;
        shutterCloseTime = _currentTime + _shutterClose;
    }

    // get the time samples for our frame interval
    std::vector<double> result;
    if (!attr.GetTimeSamplesInInterval(
            GfInterval(shutterStartTime, shutterCloseTime), &result))
    {
        return _motionSampleTimesFallback;
    }

    bool foundSamplesInInterval = !result.empty();

    double firstSample, lastSample;

    if (foundSamplesInInterval)
    {
        firstSample = result.front();
        lastSample = result.back();
    }
    else
    {
        // No authored samples in the interval: treat the shutter endpoints
        // themselves as the current extremes so the bracketing logic below
        // still runs.
        firstSample = shutterStartTime;
        lastSample = shutterCloseTime;
    }

    // If no samples were found or the first sample is later than the
    // shutter start time then attempt to get the previous sample in time.
    if (!foundSamplesInInterval || (firstSample-shutterStartTime) > epsilon)
    {
        double lower, upper;
        bool hasTimeSamples;

        if (attr.GetBracketingTimeSamples(
                shutterStartTime, &lower, &upper, &hasTimeSamples))
        {
            if (lower > shutterStartTime)
            {
                // Did not find a sample earlier than the shutter start.
                // Return no motion.
                return noMotion;
            }

            // Insert the first sample as long as it is different
            // than what we already have.
            if (fabs(lower-firstSample) > epsilon)
            {
                result.insert(result.begin(), lower);
            }
        }
    }

    // If no samples were found or the last sample is earlier than the
    // shutter close time then attempt to get the next sample in time.
    if (!foundSamplesInInterval || (shutterCloseTime-lastSample) > epsilon)
    {
        double lower, upper;
        bool hasTimeSamples;

        if (attr.GetBracketingTimeSamples(
                shutterCloseTime, &lower, &upper, &hasTimeSamples))
        {
            if (upper < shutterCloseTime)
            {
                // Did not find a sample later than the shutter close.
                // Return no motion.
                return noMotion;
            }

            // Append the last sample as long as it is different
            // than what we already have.
            if (fabs(upper-lastSample) > epsilon)
            {
                result.push_back(upper);
            }
        }
    }

    // convert from absolute to frame-relative time samples
    for (std::vector<double>::iterator I = result.begin();
            I != result.end(); ++I)
    {
        (*I) -= _currentTime;
    }

    return result;
}