void RunConfiguration::addConfigurations(std::istream& is) { std::string type; PropertySet conf; while(is >> type) { is >> conf; if (type == INFERENCE_CONF_TOKEN) { _inferences.push_back(conf); } else if (type == EVIDENCE_CONF_TOKEN) { _evidences.push_back(conf); } else if (type == EM_STEP_CONF_TOKEN) { std::set<PropertyKey> keys = conf.keys(); std::set<PropertyKey>::iterator i = keys.begin(); EMStep e; for ( ; i != keys.end(); ++i) { std::vector< std::string > edges = tokenizeString(conf.getAs<std::string>(*i), false, ";"); SmallSet< std::string > s(edges.begin(), edges.end(), edges.size()); e[*i] = s; } _emsteps.push_back(e); } else if (type == EM_CONF_TOKEN) { _em = conf; } else if (type == PATHWAY_CONF_TOKEN) { _path = conf; } else { THROW("Expecting an inference or evidence token in conf file"); } } }
// Looks up property `name` on the building identified by `handle` and
// converts it to a Scheme value. Returns SCM_UNSPECIFIED if the handle is
// unknown, the building has no property set, or the property is missing.
SCM building_get_property(int handle, const char* name) {
    Building* building = BuildingManager::current()->get_building_by_id(handle);
    if (!building) {
        std::cout << "building_get_property: unknown handle: " << handle << std::endl;
        return SCM_UNSPECIFIED;
    }

    // Guard clauses replace the original nested else-chains.
    PropertySet* props = building->get_properties();
    if (!props)
        return SCM_UNSPECIFIED;

    Property* prop = props->lookup(name);
    if (!prop)
        return SCM_UNSPECIFIED;

    return Guile::property2scm(*prop);
}
// Returns the configured application directory, creating it on disk first if
// it does not yet exist.
File ApplicationConfiguration::getApplicationDirectory() {
    PropertySet* props = getApplicationProperties();
    // NOTE(review): assumes getApplicationProperties() never returns NULL —
    // confirm; otherwise this dereference can crash.
    File directory(props->getValue("application-directory"));
    if (!directory.exists())
        directory.createDirectory();
    return directory;
}
//-------------------------------------------------------------------------------------------------- /// //-------------------------------------------------------------------------------------------------- void PropertyXmlSerializer::createAddXmlElementFromPropertySet(const PropertySet& propertySet, XmlElement* parent) { CVF_ASSERT(parent); XmlElement* xmlPropSet = parent->addChildElement("PropertySet"); CVF_ASSERT(xmlPropSet); xmlPropSet->setAttributeString("classType", propertySet.classType()); std::vector<String> keys = propertySet.allKeys(); std::vector<Variant> values = propertySet.allValues(); size_t numKeyValues = keys.size(); CVF_ASSERT(numKeyValues == values.size()); for (size_t i = 0; i < numKeyValues; i++) { const String& key = keys[i]; const Variant& value = values[i]; if (value.isValid()) { XmlElement* xmlKeyValueElement = createAddXmlElementFromVariant(value, xmlPropSet); CVF_ASSERT(xmlKeyValueElement); xmlKeyValueElement->setAttributeString("key", key); } } }
// Verifies that a string Property wired to Foo's getName/setName round-trips
// a value through PropertySet::setValue/getValue.
TEST(Property, StringProp)
{
// bind/placeholders live in std on C++11+ compilers, in std::tr1 otherwise.
#if __cplusplus > 201100L || (defined(_MSC_VER) && _MSC_VER >= 1800)
    using namespace std;
#else
    using namespace std::tr1;
#endif
    PropertyDefMap propertyDefs;
    Foo foo;
    PropertySet props;

    PropertyDefMap::iterator defIt =
        propertyDefs.insert(PropertyDefMap::value_type(
            "name",
            PropertyDef("name", "The name of the object.", PROP_STRING))).first;

    props.addProperty(OGRE_NEW Property<String>(
        &(defIt->second),
        bind(&Foo::getName, &foo),
        bind(&Foo::setName, &foo, placeholders::_1)));

    Ogre::String expected, actual;
    expected = "A simple name";
    props.setValue("name", expected);
    props.getValue("name", actual);
    ASSERT_EQ(expected, actual);
}
// Sequence-render arguments: pulls the interactivity flag and (per API
// version) the render-status and OpenGL properties out of `inArgs`.
ImageEffect::SequenceArgs::SequenceArgs(ImageEffectHost *host, PropertySet &inArgs)
  : ImageEffect::RenderScaleArgs(host, inArgs) {
  interactive = (inArgs.getInt(kOfxPropIsInteractive, 0) == 1);
#ifdef OFX_API_1_2
  // Specification Mismatch
  // -> Nuke 6.1, supposed to support OpenFX API 1.2 does not set those
  //sequentialRender = (inArgs.getInt(kOfxImageEffectPropSequentialRenderStatus, 0) != 0);
  //interactiveRender = (inArgs.getInt(kOfxImageEffectPropInteractiveRenderStatus, 0) != 0);
  // The original if/else on host->checkAPIVersion(1, 2) assigned identical
  // values in both branches, so the version check was dead code; assign
  // unconditionally until the workaround above can be lifted.
  sequentialRender = false;
  interactiveRender = false;
#endif
#ifdef OFX_API_1_3
  if (host->checkAPIVersion(1, 3)) {
    glEnabled = (inArgs.getInt(kOfxImageEffectPropOpenGLEnabled, 0) == 1);
    glTextureIndex = inArgs.getInt(kOfxImageEffectPropOpenGLTextureIndex, 0);
    glTextureTarget = inArgs.getInt(kOfxImageEffectPropOpenGLTextureTarget, 0);
  } else {
    glEnabled = false;
    glTextureIndex = -1;
    glTextureTarget = -1;
  }
#endif
}
/** @brief ctor
 *
 *  Extracts the event time, render scale and pressed-key string from the
 *  interact property set.
 */
KeyArgs::KeyArgs( const PropertySet& props )
	: InteractArgs( props )
{
	time        = props.propGetDouble( kOfxPropTime );
	renderScale = getRenderScale( props );
	keyString   = props.propGetString( kOfxPropKeyString );
}
void KeyframeEffectModelBase::ensureKeyframeGroups() const { if (m_keyframeGroups) return; m_keyframeGroups = adoptPtrWillBeNoop(new KeyframeGroupMap); const KeyframeVector keyframes = normalizedKeyframes(getFrames()); for (KeyframeVector::const_iterator keyframeIter = keyframes.begin(); keyframeIter != keyframes.end(); ++keyframeIter) { const Keyframe* keyframe = keyframeIter->get(); PropertySet keyframeProperties = keyframe->properties(); for (PropertySet::const_iterator propertyIter = keyframeProperties.begin(); propertyIter != keyframeProperties.end(); ++propertyIter) { CSSPropertyID property = *propertyIter; ASSERT_WITH_MESSAGE(!isExpandedShorthand(property), "Web Animations: Encountered shorthand CSS property (%d) in normalized keyframes.", property); KeyframeGroupMap::iterator groupIter = m_keyframeGroups->find(property); PropertySpecificKeyframeGroup* group; if (groupIter == m_keyframeGroups->end()) group = m_keyframeGroups->add(property, adoptPtrWillBeNoop(new PropertySpecificKeyframeGroup)).storedValue->value.get(); else group = groupIter->value.get(); group->appendKeyframe(keyframe->createPropertySpecificKeyframe(property)); } } // Add synthetic keyframes. for (KeyframeGroupMap::iterator iter = m_keyframeGroups->begin(); iter != m_keyframeGroups->end(); ++iter) { iter->value->addSyntheticKeyframeIfRequired(this); iter->value->removeRedundantKeyframes(); } }
double SemanticObject::getLikelihood(const PropertySet& ev) const { //propagate(ev.getTimeStamp()); double likelihood = 1; vector<Attribute> need_to_deduce; const map<Attribute, Property*>& ev_props = ev.getPropertyMap(); for(map<Attribute, Property*>::const_iterator it = ev_props.begin(); it != ev_props.end(); ++it) { const Attribute& attribute = it->first; const Property* ev_prop = it->second; const Property* this_prop = getProperty(attribute); if (this_prop) { likelihood *= this_prop->getLikelihood(ev_prop->getValue()); } else { need_to_deduce.push_back(attribute); } } vector<Property> deduced_props = KnowledgeDatabase::getInstance().inferProperties(*this, need_to_deduce); for(vector<Property>::iterator it_prop = deduced_props.begin(); it_prop != deduced_props.end(); ++it_prop) { const Property* ev_prop = ev.getProperty(it_prop->getAttribute()); assert(ev_prop); likelihood *= it_prop->getLikelihood(ev_prop->getValue()); } // cout << " Likelihood existing = " << likelihood << endl; return likelihood; }
// Render arguments: field to render, render window, and (per API version)
// render-status and OpenGL properties.
ImageEffect::RenderArgs::RenderArgs(ImageEffectHost *host, PropertySet &inArgs)
  : ImageEffect::RenderScaleArgs(host, inArgs), ImageEffect::TimeArgs(host, inArgs) {
  field = StringToImageField(inArgs.getString(kOfxImageEffectPropFieldToRender, 0));
  inArgs.getInts(kOfxImageEffectPropRenderWindow, 4, &(renderWindow.x1));
#ifdef OFX_API_1_2
  // Specification Mismatch
  // -> Nuke 6.1, supposed to support OpenFX API 1.2 does not set those
  //sequentialRender = (inArgs.getInt(kOfxImageEffectPropSequentialRenderStatus, 0) != 0);
  //interactiveRender = (inArgs.getInt(kOfxImageEffectPropInteractiveRenderStatus, 0) != 0);
  // The original if/else on host->checkAPIVersion(1, 2) assigned identical
  // values in both branches, so the version check was dead code; assign
  // unconditionally until the workaround above can be lifted.
  sequentialRender = false;
  interactiveRender = false;
#endif
#ifdef OFX_API_1_3
  if (host->checkAPIVersion(1, 3)) {
    glEnabled = (inArgs.getInt(kOfxImageEffectPropOpenGLEnabled, 0) == 1);
    glTextureIndex = inArgs.getInt(kOfxImageEffectPropOpenGLTextureIndex, 0);
    glTextureTarget = inArgs.getInt(kOfxImageEffectPropOpenGLTextureTarget, 0);
  } else {
    glEnabled = false;
    glTextureIndex = -1;
    glTextureTarget = -1;
  }
#endif
}
void TestHSMM::test_loglik(const char* filename, size_t ID, string type){ HSMMparam param(filename); vector<Real> likelihood_test; FactorGraph *graph; JTree *jt; // Set some constants size_t maxiter = 10000; Real tol = 1e-9; size_t verb = 0; // Store the constants in a PropertySet object PropertySet opts; opts.set("maxiter",maxiter); // Maximum number of iterations opts.set("tol",tol); // Tolerance for convergence opts.set("verbose",verb); // Verbosity (amount of output generated) cout << "Now we do testing...\n"; for(size_t i=0; i<test_data.size(); i++) { //initialize HSMM of size equal the number of observations graph = new FactorGraph(); graph->createHSMMFactorGraph(param.init, param.dist, test_data[i].size()); jt = new JTree(*graph, opts("updates",string("HUGIN"))("heuristic",string("MINWEIGHT")) ); //clamp the observation variables to their observed values for(size_t j = 0; j < test_data[i].size(); j++ ){ //cout << "clamping var" << test_data[i][j].first << " to value " << test_data[i][j].second << "\n"; jt->clamp(test_data[i][j].first, test_data[i][j].second); } jt->init(); jt->run(); //compute normalized loglikelyhood likelihood_test.push_back(jt->logZ()/test_data[i].size()); delete jt; delete graph; cout << "Tested point " << i << " out of " << test_data.size() <<"\n"; } cout << "done.\n"; ofstream os; stringstream result; result << string("data/HSMMlikelihood_") << type << string("_") << ID << string(".txt"); os.open(result.str().c_str(), ios::trunc); for(size_t i=0; i<likelihood_test.size(); i++){ os << likelihood_test.at(i)<<"\n"; } }
bool CompositorAnimations::getAnimatedBoundingBox(FloatBox& box, const AnimationEffect& effect, double minValue, double maxValue) const { const KeyframeEffectModelBase& keyframeEffect = toKeyframeEffectModelBase(effect); PropertySet properties = keyframeEffect.properties(); if (properties.isEmpty()) return true; minValue = std::min(minValue, 0.0); maxValue = std::max(maxValue, 1.0); for (const auto& property : properties) { // TODO: Add the ability to get expanded bounds for filters as well. if (property != CSSPropertyTransform && property != CSSPropertyWebkitTransform) continue; const PropertySpecificKeyframeVector& frames = keyframeEffect.getPropertySpecificKeyframes(property); if (frames.isEmpty() || frames.size() < 2) continue; FloatBox originalBox(box); for (size_t j = 0; j < frames.size() - 1; ++j) { const AnimatableTransform* startTransform = toAnimatableTransform(frames[j]->getAnimatableValue().get()); const AnimatableTransform* endTransform = toAnimatableTransform(frames[j+1]->getAnimatableValue().get()); if (!startTransform || !endTransform) return false; // TODO: Add support for inflating modes other than Replace. if (frames[j]->composite() != AnimationEffect::CompositeReplace) return false; const TimingFunction& timing = frames[j]->easing(); double min = 0; double max = 1; if (j == 0) { float frameLength = frames[j+1]->offset(); if (frameLength > 0) { min = minValue / frameLength; } } if (j == frames.size() - 2) { float frameLength = frames[j+1]->offset() - frames[j]->offset(); if (frameLength > 0) { max = 1 + (maxValue - 1) / frameLength; } } FloatBox bounds; timing.range(&min, &max); if (!endTransform->transformOperations().blendedBoundsForBox(originalBox, startTransform->transformOperations(), min, max, &bounds)) return false; box.expandTo(bounds); } } return true; }
void CompositorAnimationsImpl::getAnimationOnCompositor(const Timing& timing, const KeyframeEffectModel& effect, Vector<OwnPtr<blink::WebAnimation> >& animations) { ASSERT(animations.isEmpty()); CompositorTiming compositorTiming; bool timingValid = convertTimingForCompositor(timing, compositorTiming); ASSERT_UNUSED(timingValid, timingValid); RefPtr<TimingFunction> timingFunction = timing.timingFunction; if (compositorTiming.reverse) timingFunction = CompositorAnimationsTimingFunctionReverser::reverse(timingFunction.get()); PropertySet properties = effect.properties(); ASSERT(!properties.isEmpty()); for (PropertySet::iterator it = properties.begin(); it != properties.end(); ++it) { KeyframeVector values; getKeyframeValuesForProperty(&effect, *it, compositorTiming.scaledDuration, compositorTiming.reverse, values); blink::WebAnimation::TargetProperty targetProperty; OwnPtr<blink::WebAnimationCurve> curve; switch (*it) { case CSSPropertyOpacity: { targetProperty = blink::WebAnimation::TargetPropertyOpacity; blink::WebFloatAnimationCurve* floatCurve = blink::Platform::current()->compositorSupport()->createFloatAnimationCurve(); addKeyframesToCurve(*floatCurve, values, *timingFunction.get()); curve = adoptPtr(floatCurve); break; } case CSSPropertyWebkitFilter: { targetProperty = blink::WebAnimation::TargetPropertyFilter; blink::WebFilterAnimationCurve* filterCurve = blink::Platform::current()->compositorSupport()->createFilterAnimationCurve(); addKeyframesToCurve(*filterCurve, values, *timingFunction); curve = adoptPtr(filterCurve); break; } case CSSPropertyWebkitTransform: { targetProperty = blink::WebAnimation::TargetPropertyTransform; blink::WebTransformAnimationCurve* transformCurve = blink::Platform::current()->compositorSupport()->createTransformAnimationCurve(); addKeyframesToCurve(*transformCurve, values, *timingFunction.get()); curve = adoptPtr(transformCurve); break; } default: ASSERT_NOT_REACHED(); continue; } ASSERT(curve.get()); OwnPtr<blink::WebAnimation> 
animation = adoptPtr(blink::Platform::current()->compositorSupport()->createAnimation(*curve, targetProperty)); animation->setIterations(compositorTiming.adjustedIterationCount); animation->setTimeOffset(compositorTiming.scaledTimeOffset); animation->setAlternatesDirection(compositorTiming.alternate); animations.append(animation.release()); } ASSERT(!animations.isEmpty()); }
/** @brief ctor
 *
 *  Extracts pixel scale, pen position (x at index 0, y at index 1) and pen
 *  pressure from the interact property set.
 */
PenArgs::PenArgs( const PropertySet& props )
	: InteractArgs( props )
{
	pixelScale    = getPixelScale( props );
	penPosition.x = props.propGetDouble( kOfxInteractPropPenPosition, 0 );
	penPosition.y = props.propGetDouble( kOfxInteractPropPenPosition, 1 );
	penPressure   = props.propGetDouble( kOfxInteractPropPenPressure );
}
// Exports the current MR parameters as a PropertySet.
PropertySet MR::getProperties() const {
    PropertySet opts;
    opts.set( "tol",     props.tol );
    opts.set( "verbose", props.verbose );
    opts.set( "updates", props.updates );
    opts.set( "inits",   props.inits );
    return opts;
}
// Exports the current MF parameters as a PropertySet.
// Consistency fix: every sibling getProperties() in this codebase (MR,
// TreeEP) and all PropertySet call sites use lowercase set(); the
// capitalized Set() spelling here was inconsistent with the libDAI API.
PropertySet MF::getProperties() const {
    PropertySet opts;
    opts.set( "tol",     props.tol );
    opts.set( "maxiter", props.maxiter );
    opts.set( "verbose", props.verbose );
    opts.set( "damping", props.damping );
    return opts;
}
/** @brief fetch a background colour out of the property set
 *
 *  Reads the r/g/b components from indices 0..2 of
 *  kOfxInteractPropBackgroundColour.
 */
static OfxRGBColourD getBackgroundColour( const PropertySet& props )
{
	OfxRGBColourD colour;
	colour.r = props.propGetDouble( kOfxInteractPropBackgroundColour, 0 );
	colour.g = props.propGetDouble( kOfxInteractPropBackgroundColour, 1 );
	colour.b = props.propGetDouble( kOfxInteractPropBackgroundColour, 2 );
	return colour;
}
// Collects the ids of all properties stored on this keyframe.
PropertySet StringKeyframe::properties() const
{
    // This is not used in time-critical code, so we probably don't need to
    // worry about caching this result.
    PropertySet result;
    const unsigned count = m_propertySet->propertyCount();
    for (unsigned index = 0; index < count; ++index)
        result.add(m_propertySet->propertyAt(index).id());
    return result;
}
// Collects the CSS property ids keyed in this keyframe's value map.
PropertySet AnimatableValueKeyframe::properties() const
{
    // This is not used in time-critical code, so we probably don't need to
    // worry about caching this result.
    PropertySet result;
    PropertyValueMap::const_iterator end = m_propertyValues.end();
    for (PropertyValueMap::const_iterator it = m_propertyValues.begin(); it != end; ++it)
        result.add(*it.keys()); // keys() adapts the map iterator to a key iterator
    return result;
}
// Exports the current TreeEP parameters as a PropertySet.
PropertySet TreeEP::getProperties() const {
    PropertySet opts;
    opts.set( "tol",     props.tol );
    opts.set( "maxiter", props.maxiter );
    opts.set( "maxtime", props.maxtime );
    opts.set( "verbose", props.verbose );
    opts.set( "type",    props.type );
    return opts;
}
//==============================================================================
// Settings window: audio device selector plus a MIDI device/channel combo box.
// NOTE: this is Projucer-generated JUCE code — the //[...] marker pairs are
// editor regions; hand edits outside them are overwritten on regeneration.
ApplicationSettingsWindow::ApplicationSettingsWindow (OwlControlSettings& settings, AudioDeviceManager& deviceManager)
    : theSettings(settings)
{
    //[Constructor_pre] You can add your own custom stuff here..
    //[/Constructor_pre]

    // Standard JUCE audio device selector (no input/output channel editing).
    addAndMakeVisible (audioSelector = new AudioDeviceSelectorComponent (deviceManager,0,0,0,0,true,true,false,false));
    audioSelector->setName ("new component");

    // MIDI channel selector: item 1 is OMNI, items 2..17 map to channels 1..16.
    addAndMakeVisible (midiDeviceBox = new ComboBox ("new combo box"));
    midiDeviceBox->setEditableText (false);
    midiDeviceBox->setJustificationType (Justification::centredLeft);
    midiDeviceBox->setTextWhenNothingSelected (String());
    midiDeviceBox->setTextWhenNoChoicesAvailable (TRANS("(no choices)"));
    midiDeviceBox->addItem (TRANS("OMNI"), 1);
    midiDeviceBox->addItem (TRANS("1"), 2);
    midiDeviceBox->addItem (TRANS("2"), 3);
    midiDeviceBox->addItem (TRANS("3"), 4);
    midiDeviceBox->addItem (TRANS("4"), 5);
    midiDeviceBox->addItem (TRANS("5"), 6);
    midiDeviceBox->addItem (TRANS("6"), 7);
    midiDeviceBox->addItem (TRANS("7"), 8);
    midiDeviceBox->addItem (TRANS("8"), 9);
    midiDeviceBox->addItem (TRANS("9"), 10);
    midiDeviceBox->addItem (TRANS("10"), 11);
    midiDeviceBox->addItem (TRANS("11"), 12);
    midiDeviceBox->addItem (TRANS("12"), 13);
    midiDeviceBox->addItem (TRANS("13"), 14);
    midiDeviceBox->addItem (TRANS("14"), 15);
    midiDeviceBox->addItem (TRANS("15"), 16);
    midiDeviceBox->addItem (TRANS("16"), 17);
    midiDeviceBox->addListener (this);

    addAndMakeVisible (label = new Label ("new label", TRANS("MIDI Device")));
    label->setFont (Font (15.00f, Font::plain));
    label->setJustificationType (Justification::centredLeft);
    label->setEditable (false, false, false);
    label->setColour (TextEditor::textColourId, Colours::black);
    label->setColour (TextEditor::backgroundColourId, Colour (0x00000000));

    //[UserPreSize]
    //[/UserPreSize]

    setSize (600, 400);

    //[Constructor] You can add your own custom stuff here..
    // Restore the persisted MIDI output and input selection.
    // NOTE(review): setMidiInputEnabled's second argument is a bool — passing 1
    // enables the input; presumably "midi-input" holds a device name. Confirm.
    PropertySet* properties = ApplicationConfiguration::getApplicationProperties();
    deviceManager.setDefaultMidiOutput(properties->getValue("midi-output"));
    deviceManager.setMidiInputEnabled(properties->getValue("midi-input"), 1);
    //[/Constructor]
}
void HAK::setProperties( const PropertySet &opts ) { DAI_ASSERT( opts.hasKey("tol") ); DAI_ASSERT( opts.hasKey("doubleloop") ); DAI_ASSERT( opts.hasKey("clusters") ); props.tol = opts.getStringAs<Real>("tol"); props.doubleloop = opts.getStringAs<bool>("doubleloop"); props.clusters = opts.getStringAs<Properties::ClustersType>("clusters"); if( opts.hasKey("maxiter") ) props.maxiter = opts.getStringAs<size_t>("maxiter"); else props.maxiter = 10000; if( opts.hasKey("maxtime") ) props.maxtime = opts.getStringAs<Real>("maxtime"); else props.maxtime = INFINITY; if( opts.hasKey("verbose") ) props.verbose = opts.getStringAs<size_t>("verbose"); else props.verbose = 0; if( opts.hasKey("loopdepth") ) props.loopdepth = opts.getStringAs<size_t>("loopdepth"); else DAI_ASSERT( props.clusters != Properties::ClustersType::LOOP ); if( opts.hasKey("damping") ) props.damping = opts.getStringAs<Real>("damping"); else props.damping = 0.0; if( opts.hasKey("init") ) props.init = opts.getStringAs<Properties::InitType>("init"); else props.init = Properties::InitType::UNIFORM; }
KVOID BulletFactor::BindEffect( PropertySet kProp )
{
	//////////////////////////////////////////////////////////////////////////
	// Load the spawn / idle / destroy effects. The three phases differed only
	// in their property-key prefix and user-data tag, so the three
	// copy-pasted sections are now driven from one table.
	struct EffectSlot
	{
		const char* sEffectKey;  // effect resource name property
		const char* sTimeKey;    // optional play-time property
		const char* sScaleKey;   // optional scale property
		int         nUserData;   // tag passed to the effect callback
	};
	static const EffectSlot kSlots[] =
	{
		{ "$SpawnEffect",   "$SpawnEffectTime",   "$SpawnEffectScale",   1 },
		{ "$IdleEffect",    "$IdleEffectTime",    "$IdleEffectScale",    2 },
		{ "$DestroyEffect", "$DestroyEffectTime", "$DestroyEffectScale", 3 },
	};

	KSTR sEffect;
	for (size_t i = 0; i < sizeof(kSlots) / sizeof(kSlots[0]); ++i)
	{
		if (!kProp.GetStrValue(kSlots[i].sEffectKey, sEffect))
			continue;

		// Play time defaults to -1 (presumably "loop forever" — confirm).
		KFLOAT fTimes = -1.0f;
		kProp.GetFloatValue(kSlots[i].sTimeKey, fTimes);

		EffectObject* pObj = AttachEffect(sEffect, fTimes);
		if (pObj)
		{
			// Apply the optional effect scale.
			KFLOAT fEfScale;
			if (kProp.GetFloatValue(kSlots[i].sScaleKey, fEfScale))
			{
				pObj->SetScale(fEfScale);
			}
			pObj->SetUserData(kSlots[i].nUserData);
			pObj->SetCallbackObj(this);
		}
	}
}
bool BruteForceOptMatching::matchPoints(ConnectedFactorGraph& graph, const int numIter, const float maxDiff, const float damping, McDArray<McVec2i>& matchedPointPairs, McDArray<int>& ambiguousAssignments) { FactorGraph fg(graph.factors); cout << "\n run BP for network with " << graph.factors.size() << " factors and " << graph.factors[0].vars().front().states() << "states"; PropertySet opts; opts.set("tol", (Real)maxDiff); opts.set("maxiter", (size_t)numIter); opts.set("maxtime", (Real)180); opts.set("verbose", (size_t)1); opts.set("updates", string("SEQMAX")); opts.set("logdomain", (bool)true); opts.set("inference", string("MAXPROD")); opts.set("damping", (Real)damping); BP ia(fg, opts); ia.init(); try { ia.run(); } catch (Exception e) { outputSingleFactorValues(graph); outputDoubleFactorValues(graph); throw e; } getMaxProbAssignments(ia, fg, graph, matchedPointPairs); checkAmbiguities(ia, fg, graph, ambiguousAssignments); checkAmbiguitiesInAssignments(graph, matchedPointPairs, ambiguousAssignments); return (ia.maxDiff() < maxDiff); }
// Writes the per-clip preferences (components, and — where the host supports
// them — bit depth and pixel aspect ratio) plus the output-clip preferences
// into `outArgs`.
void ImageEffect::GetClipPrefArgs::setOutputs(PropertySet &outArgs) {
  static std::string compBase = "OfxImageClipPropComponents_";
  static std::string depthBase = "OfxImageClipPropDepth_";
  static std::string parBase = "OfxImageClipPropPAR_";

  const bool multiDepth = (host ? host->supportsMultipleClipDepths() : false);
  const bool multiPAR = (host ? host->supportsMultipleClipPARs() : false);

  for (std::map<std::string, ClipPreferences>::iterator it = prefs.begin(); it != prefs.end(); ++it) {
    outArgs.setString((compBase + it->first).c_str(), 0, ImageComponentToString(it->second.components));
    if (multiDepth) {
      outArgs.setString((depthBase + it->first).c_str(), 0, BitDepthToString(it->second.bitDepth));
    }
    if (multiPAR) {
      outArgs.setDouble((parBase + it->first).c_str(), 0, it->second.pixelAspectRatio);
    }
  }

  // Output-clip wide preferences.
  outArgs.setDouble(kOfxImageEffectPropFrameRate, 0, outPref.frameRate);
  outArgs.setString(kOfxImageClipPropFieldOrder, 0, ImageFieldOrderToString(outPref.fieldOrder));
  outArgs.setString(kOfxImageEffectPropPreMultiplication, 0, ImagePreMultToString(outPref.preMult));
  outArgs.setInt(kOfxImageClipPropContinuousSamples, 0, (outPref.continuousSamples ? 1 : 0));
  outArgs.setInt(kOfxImageEffectFrameVarying, 0, (outPref.frameVarying ? 1 : 0));
}
// Returns the union of the property sets of all keyframes in this model.
PropertySet KeyframeEffectModelBase::properties() const
{
    PropertySet result;
    if (!m_keyframes.size())
        return result;

    // Seed with the first keyframe, then merge in the rest.
    result = m_keyframes[0]->properties();
    for (size_t i = 1; i < m_keyframes.size(); i++) {
        PropertySet frameProperties = m_keyframes[i]->properties();
        for (PropertySet::const_iterator it = frameProperties.begin(); it != frameProperties.end(); ++it)
            result.add(*it);
    }
    return result;
}
// Writes each input clip's needed frame ranges into `outArgs` as flattened
// (min, max) double pairs under "OfxImageClipPropFrameRange_<clip>".
void ImageEffect::GetFramesNeededArgs::setOutputs(PropertySet &outArgs) {
  static std::string outBaseName = "OfxImageClipPropFrameRange_";

  for (std::map<std::string, FrameRangeList>::iterator it = inRanges.begin(); it != inRanges.end(); ++it) {
    std::string outName = outBaseName + it->first;
    FrameRangeList &ranges = it->second;
    // Two doubles per range: min at even indices, max at odd.
    int idx = 0;
    for (size_t r = 0; r < ranges.size(); ++r, idx += 2) {
      outArgs.setDouble(outName.c_str(), idx, ranges[r].min);
      outArgs.setDouble(outName.c_str(), idx + 1, ranges[r].max);
    }
  }
}
/** @brief fetch a background colour out of the property set
 *
 *  Reads three doubles (r, g, b) in one multi-value fetch; defaults to black
 *  before the fetch.
 */
static OfxRGBColourD getBackgroundColour(const PropertySet &props)
{
    OfxRGBColourD colour = {0., 0., 0.};
    props.propGetDoubleN(kOfxInteractPropBackgroundColour, &colour.r, 3);
    return colour;
}
void filterData(const std::string& filterExpr, Reader& reader) { Gto::Reader::Properties& properties = reader.properties(); Gto::Reader::Objects& objects = reader.objects(); for (size_t i=0; i < properties.size(); i++) { Gto::Reader::PropertyInfo& p = properties[i]; const Gto::Reader::ComponentInfo* c = p.component; const Gto::Reader::ObjectInfo* o = c->object; string name; name = reader.stringFromId(o->name); name += "."; name += reader.stringFromId(c->name); name += "."; name += reader.stringFromId(p.name); if (!fnmatch(filterExpr.c_str(), name.c_str(), 0)) { filteredProperties.insert(&p); filteredObjects.insert(o); } } for (size_t i=0; i < objects.size(); i++) { Gto::Reader::ObjectInfo* p = &objects[i]; if (filteredObjects.find(p) != filteredObjects.end()) { reader.accessObject(*p); } } }
/** @brief fetch a render scale out of the property set
 *
 *  Reads (x, y) in one multi-value fetch; defaults to (1, 1).
 */
static OfxPointD getRenderScale(const PropertySet &props)
{
    OfxPointD scale = {1., 1.};
    props.propGetDoubleN(kOfxImageEffectPropRenderScale, &scale.x, 2);
    return scale;
}