IsoMediaFile::CodingConstraints WriterConfig::readCodingConstraints(const Json::Value& ccstValues) const
{
    // Parse the coding-constraints ('ccst') flags from JSON.
    // Defaults when a key is absent: all_ref_pics_intra = true,
    // intra_pred_used = false.
    IsoMediaFile::CodingConstraints constraints;
    constraints.allRefPicsIntra = readBool(ccstValues["all_ref_pics_intra"], true);
    constraints.intraPredUsed   = readBool(ccstValues["intra_pred_used"], false);
    return constraints;
}
bool TEIniFile::getBool(const QString & name, bool & value)
{
    // Read boolean entry 'name', using the current section's defaults table
    // to supply the fallback passed to readBool().
    //
    // Returns true when 'name' is declared in the defaults for the current
    // section (value read with that declared default), false otherwise
    // (value read with a default of false).
    //
    // Improvement: the original looked the section map up three times
    // (find + two operator[]); hoist a reference and reuse it.
    auto& section = SectionListDef[m_sCurSection];
    if (section.find(name) == section.end())
    {
        value = readBool(name, false);
        return false;
    }
    // Any non-zero integer in the defaults table means 'true'.
    value = readBool(name, section[name].toInt() != 0);
    return true;
}
void Mega8Config::loadConfig(const wxString &profile) { bool isNew; if (_config == NULL) { _config = new wxConfig(wxT("Mega8"), wxT("Ready4Next")); if (!readBool(wxT("FirstInit"))) { resetConfig(); writeBool(wxT("FirstInit"), true); saveConfig(profile); } } _currentProfile = profile; isNew = loadKeyboard(profile); _LastFolder = readString(wxT("LastFolder")); _FullScreen = readBool(wxT("FullScreen")); _SpeedAuto = readBool(wxT("SpeedAuto")); _DisplayHUD = readBool(wxT("DisplayHUD")); _Filtered = readBool(wxT("Filtered")); _Sound = readBool(wxT("Sound")); _UseSleep = readBool(wxT("UseSleep")); _SyncClock = readBool(wxT("SyncClock")); _ColorTheme = (Chip8ColorTheme)readLong(wxT("ColorTheme")); _InverseColor = readBool(wxT("InverseColor")); for (int i = 0; i <= sizeof(Chip8Types); i++) { _FrequencyRatio[i] = (long)min((long)max((long)readLong(wxT("FrequencyRatio/") + getMachineTypeStr((Chip8Types) i)), (long)4), (long)9); } // Save this profile if new if (isNew) { saveConfig(profile); } }
bool recalcPhiFunctionObject::start()
{
    // Read the field names this function object operates on from its
    // dictionary, then delegate to the base-class start().
    UName_   = word(dict_.lookup("UName"));
    phiName_ = word(dict_.lookup("phiName"));
    pName_   = word(dict_.lookup("pName"));

    // Density is optional; "none" marks the incompressible case.
    rhoName_ = dict_.lookupOrDefault<word>("rhoName", "none");

    // Output switches (both mandatory).
    writeOldFields_ = readBool(dict_.lookup("writeOldFields"));
    writeFields_    = readBool(dict_.lookup("writeFields"));

    return updateSimpleFunctionObject::start();
}
void DocxReader::readRunProperties(Style& style, bool allowstyles)
{
	// Parses the children of a run-properties element and applies each
	// recognised formatting toggle to 'style'. When 'allowstyles' is true,
	// a referenced character style (w:rStyle) is merged in as well.
	while (m_xml.readNextStartElement()) {
		// Most toggles carry their state in the "w:val" attribute.
		QStringRef value = m_xml.attributes().value("w:val");
		if ((m_xml.qualifiedName() == "w:b") || (m_xml.qualifiedName() == "w:bCs")) {
			// Bold (plain and complex-script variants)
			style.char_format.setFontWeight(readBool(value) ? QFont::Bold : QFont::Normal);
		} else if ((m_xml.qualifiedName() == "w:i") || (m_xml.qualifiedName() == "w:iCs")) {
			// Italic (plain and complex-script variants)
			style.char_format.setFontItalic(readBool(value));
		} else if (m_xml.qualifiedName() == "w:u") {
			// Underline: "none" disables it; "single" and every decorative
			// pattern below are all rendered as a plain underline.
			if (value == "single") {
				style.char_format.setFontUnderline(true);
			} else if (value == "none") {
				style.char_format.setFontUnderline(false);
			} else if ((value == "dash")
					|| (value == "dashDotDotHeavy")
					|| (value == "dashDotHeavy")
					|| (value == "dashedHeavy")
					|| (value == "dashLong")
					|| (value == "dashLongHeavy")
					|| (value == "dotDash")
					|| (value == "dotDotDash")
					|| (value == "dotted")
					|| (value == "dottedHeavy")
					|| (value == "double")
					|| (value == "thick")
					|| (value == "wave")
					|| (value == "wavyDouble")
					|| (value == "wavyHeavy")
					|| (value == "words")) {
				style.char_format.setFontUnderline(true);
			}
		} else if (m_xml.qualifiedName() == "w:strike") {
			style.char_format.setFontStrikeOut(readBool(value));
		} else if (m_xml.qualifiedName() == "w:vertAlign") {
			// Superscript / subscript / baseline
			if (value == "superscript") {
				style.char_format.setVerticalAlignment(QTextCharFormat::AlignSuperScript);
			} else if (value == "subscript") {
				style.char_format.setVerticalAlignment(QTextCharFormat::AlignSubScript);
			} else if (value == "baseline") {
				style.char_format.setVerticalAlignment(QTextCharFormat::AlignNormal);
			}
		} else if ((m_xml.qualifiedName() == "w:rStyle") && allowstyles) {
			// Referenced character style: start from the named style and
			// let the direct formatting collected so far override it.
			Style rstyle = m_styles.value(value.toString());
			rstyle.merge(style);
			style = rstyle;
		}
		m_xml.skipCurrentElement();
	}
}
// Construct from components IncompressibleCloud::IncompressibleCloud( const volPointInterpolation& vpi, const volVectorField& U ) : Cloud<HardBallParticle>(U.mesh()), runTime_(U.time()), time0_(runTime_.value()), mesh_(U.mesh()), volPointInterpolation_(vpi), U_(U), smoment_(mesh_.nCells(), vector::zero), random(666), cloudProperties_ ( IOobject ( "cloudProperties", U.time().constant(), U.db(), IOobject::MUST_READ, IOobject::NO_WRITE ) ), interpolationSchemes_(cloudProperties_.subDict("interpolationSchemes")) { g_=cloudProperties_.lookup("g"); HardBallParticle::density=readScalar(cloudProperties_.lookup("density")); dragCoefficient_=readScalar(cloudProperties_.lookup("drag")); subCycles_=readScalar(cloudProperties_.lookup("subCycles")); useSourceMoment=readBool(cloudProperties_.lookup("useMomentumSource")); dictionary injection(cloudProperties_.subDict("injection")); thres=readScalar(injection.lookup("thres")); center=injection.lookup("center"); r0=readScalar(injection.lookup("r0")); vel0=readScalar(injection.lookup("vel0")); vel1=injection.lookup("vel1"); d0=readScalar(injection.lookup("d0")); d1=readScalar(injection.lookup("d1")); tStart=readScalar(injection.lookup("tStart")); tEnd=readScalar(injection.lookup("tEnd")); dictionary wall(cloudProperties_.subDict("wall")); wallReflect_=readBool(wall.lookup("reflect")); if(wallReflect_) { wallElasticity_=readScalar(wall.lookup("elasticity")); } }
void secforced::loadSection(std::stringstream& file)
{
    // Deserialise a "forced" section from the stream. Reads are strictly
    // sequential and must match the writer's field order exactly.
    readNulls(&file, 1);                // skip one padding/null byte
    int namelength = readInt(&file);    // name is length-prefixed
    sName = QString(readString(&file, namelength).c_str());
    bSpeed = true;                      // always set for this section type (not stored in the file)
    iTime = readInt(&file);
    bOrientation = readBool(&file);
    bArgument = readBool(&file);
    // Embedded function curves, in fixed order.
    rollFunc->loadFunction(file);
    normForce->loadFunction(file);
    latForce->loadFunction(file);
}
dynamicFunctionObjectListProxy::dynamicFunctionObjectListProxy
(
    const word& name,
    const Time& t,
    const dictionary& dict,
    const char *providerNameStr
)
:
    functionObjectListProxy(
        name,
        t,
        dict,
        false
    )
{
    // Resolve the dictionary-provider name: an empty argument means the
    // provider is named by the "dictionaryProvider" entry instead.
    word providerName(providerNameStr);
    if (providerName.size() == 0)
    {
        providerName = word(dict.lookup("dictionaryProvider"));
    }

    provider_ = dynamicDictionaryProvider::New(providerName, dict, (*this));

    // Optionally populate the list right away rather than on first execute.
    if (readBool(dict.lookup("readDuringConstruction")))
    {
        if (writeDebug())
        {
            Info << this->name() << " list initialized during construction" << endl;
        }
        read(dict);
    }
}
void setOrderLines(Order *orders, int pos, Product *products, int *pCount)
{
    // Keep adding order lines until the user declines to add another.
    bool addMore = true;
    do
    {
        setOrderProductId(orders, pos, products, pCount);
        readBool(&addMore, O_MSG_ADDMORE_LINES);
    } while (addMore);
}
// Dictionary constructor: field name defaults to the patch field's own
// internal-field name; averaging setup is mandatory; the interpolation
// scheme is only read for nearest-cell mapping.
// NOTE(review): this definition appears truncated in the source (the body's
// closing brace is missing); code reproduced as-is.
mappedPatchFieldBase<Type>::mappedPatchFieldBase
(
    const mappedPatchBase& mapper,
    const fvPatchField<Type>& patchField,
    const dictionary& dict
)
:
    mapper_(mapper),
    patchField_(patchField),
    fieldName_
    (
        dict.template lookupOrDefault<word>
        (
            "fieldName",
            patchField_.dimensionedInternalField().name()
        )
    ),
    setAverage_(readBool(dict.lookup("setAverage"))),
    average_(pTraits<Type>(dict.lookup("average"))),
    interpolationScheme_(interpolationCell<Type>::typeName)
{
    // Only nearest-cell sampling supports a configurable scheme.
    if (mapper_.mode() == mappedPatchBase::NEARESTCELL)
    {
        dict.lookup("interpolationScheme") >> interpolationScheme_;
    }
// Dictionary constructor: initialises the time-varying mapped fixed-value
// point patch field. Sampling state (times, cached values, averages) starts
// empty/invalid (-1) and is filled by updateCoeffs().
Foam::
timeVaryingMappedFixedValuePointPatchField<Type>::
timeVaryingMappedFixedValuePointPatchField
(
    const pointPatch& p,
    const DimensionedField<Type, pointMesh>& iF,
    const dictionary& dict
)
:
    fixedValuePointPatchField<Type>(p, iF),
    fieldTableName_(iF.name()),  // default; may be overridden below
    setAverage_(readBool(dict.lookup("setAverage"))),
    perturb_(dict.lookupOrDefault("perturb", 1E-5)),  // jitter for degenerate point layouts -- TODO confirm
    mapperPtr_(NULL),
    sampleTimes_(0),
    startSampleTime_(-1),
    startSampledValues_(0),
    startAverage_(pTraits<Type>::zero),
    endSampleTime_(-1),
    endSampledValues_(0),
    endAverage_(pTraits<Type>::zero)
{
    // Optional override of the table name (defaults to the field name).
    dict.readIfPresent("fieldTableName", fieldTableName_);
    updateCoeffs();
}
// Construct a CSV-backed Function1 table from dictionary settings, then
// read and validate the file contents.
Foam::Function1Types::CSV<Type>::CSV
(
    const word& entryName,
    const dictionary& dict
)
:
    TableBase<Type>(entryName, dict),
    nHeaderLine_(readLabel(dict.lookup("nHeaderLine"))),   // header lines to skip
    refColumn_(readLabel(dict.lookup("refColumn"))),       // column with the reference (x) value
    componentColumns_(dict.lookup("componentColumns")),    // one column per component of Type
    separator_(dict.lookupOrDefault<string>("separator", string(","))[0]),  // only the first char is used
    mergeSeparators_(readBool(dict.lookup("mergeSeparators"))),
    fName_(dict.lookup("file"))
{
    // Each component of Type must map to exactly one column.
    if (componentColumns_.size() != pTraits<Type>::nComponents)
    {
        FatalErrorInFunction
            << componentColumns_ << " does not have the expected length of "
            << pTraits<Type>::nComponents << endl
            << exit(FatalError);
    }

    read();

    TableBase<Type>::check();
}
bool workflowControls::restartRequested() const { const dictionary& meshDict = mesh_.returnTime().lookupObject<dictionary>("meshDict"); if ( meshDict.found("workflowControls") && meshDict.isDict("workflowControls") ) { const dictionary& workflowControls = meshDict.subDict("workflowControls"); if( workflowControls.found("restartFromLatestStep") ) { const bool restart = readBool(workflowControls.lookup("restartFromLatestStep")); return restart; } } return false; }
// Dictionary constructor: initialises the time-varying mapped fixed-value
// patch field; sampling state starts empty/invalid (-1).
// NOTE(review): this definition appears truncated in the source (the body's
// closing brace is missing); code reproduced as-is.
timeVaryingMappedFixedValueFvPatchField<Type>::
timeVaryingMappedFixedValueFvPatchField
(
    const fvPatch& p,
    const DimensionedField<Type, volMesh>& iF,
    const dictionary& dict
)
:
    fixedValueFvPatchField<Type>(p, iF),
    fieldTableName_(iF.name()),  // default; may be overridden below
    setAverage_(readBool(dict.lookup("setAverage"))),
    referenceCS_(NULL),
    nearestVertex_(0),
    nearestVertexWeight_(0),
    sampleTimes_(0),
    startSampleTime_(-1),
    startSampledValues_(0),
    startAverage_(pTraits<Type>::zero),
    endSampleTime_(-1),
    endSampledValues_(0),
    endAverage_(pTraits<Type>::zero)
{
    if (debug)
    {
        Pout<< "timeVaryingMappedFixedValue : construct from dictionary"
            << endl;
    }
    // Optional override of the table name (defaults to the field name).
    if (dict.found("fieldTableName"))
    {
        dict.lookup("fieldTableName") >> fieldTableName_;
    }
bool panicDumpFunctionObject::start()
{
    // Configure the panic-dump check: the named field is monitored against
    // [minimum, maximum]; optionally keep clones of previous time-steps so
    // they can be written when the check trips.
    simpleFunctionObject::start();

    fieldName_=word(dict_.lookup("fieldName"));
    minimum_=readScalar(dict_.lookup("minimum"));
    maximum_=readScalar(dict_.lookup("maximum"));

    Info << "Checking for field " << fieldName_ << " in range [ " << minimum_
        << " , " << maximum_ << " ] " << endl;

    if(dict_.found("storeAndWritePreviousState")) {
        storeAndWritePreviousState_=readBool(
            dict_.lookup("storeAndWritePreviousState")
        );
        if(storeAndWritePreviousState_) {
            Info << name() << " stores the previous time-steps" << endl;

            // Holds clones of earlier states for dumping on panic.
            lastTimes_.set(
                new TimeCloneList(
                    dict_
                )
            );
        }
    } else {
        // Entry absent: warn and keep the member's prior value
        // (presumably default-initialised to false -- see declaration).
        WarningIn("panicDumpFunctionObject::start()")
            << "storeAndWritePreviousState not set in" << dict_.name()
                << endl << "Assuming 'false'"
                << endl;
    }

    return true;
}
// Dictionary constructor: initialises the time-varying mapped fixed-value
// patch field. If the dictionary carries an explicit "value" it is applied
// directly; otherwise the field is evaluated via updateCoeffs().
timeVaryingMappedFixedValueFvPatchField<Type>::
timeVaryingMappedFixedValueFvPatchField
(
    const fvPatch& p,
    const DimensionedField<Type, volMesh>& iF,
    const dictionary& dict
)
:
    fixedValueFvPatchField<Type>(p, iF),
    fieldTableName_(iF.name()),  // default; may be overridden below
    setAverage_(readBool(dict.lookup("setAverage"))),
    perturb_(dict.lookupOrDefault("perturb", 1E-5)),  // jitter for degenerate face layouts -- TODO confirm
    referenceCS_(NULL),
    nearestVertex_(0),
    nearestVertexWeight_(0),
    sampleTimes_(0),
    startSampleTime_(-1),
    startSampledValues_(0),
    startAverage_(pTraits<Type>::zero),
    endSampleTime_(-1),
    endSampledValues_(0),
    endAverage_(pTraits<Type>::zero)
{
    // Optional override of the table name (defaults to the field name).
    dict.readIfPresent("fieldTableName", fieldTableName_);

    if (dict.found("value"))
    {
        fvPatchField<Type>::operator==(Field<Type>("value", dict, p.size()));
    }
    else
    {
        updateCoeffs();
    }
}
// Construct a CSV-backed DataEntry from the coefficients sub-dictionary
// (named "<type><ext>"), then read and validate the file contents.
Foam::CSV<Type>::CSV
(
    const word& entryName,
    const dictionary& dict,
    const word& ext
)
:
    DataEntry<Type>(entryName),
    TableBase<Type>(entryName, dict.subDict(type() + ext)),
    coeffs_(dict.subDict(type() + ext)),
    nHeaderLine_(readLabel(coeffs_.lookup("nHeaderLine"))),   // header lines to skip
    refColumn_(readLabel(coeffs_.lookup("refColumn"))),       // column with the reference (x) value
    componentColumns_(coeffs_.lookup("componentColumns")),    // one column per component of Type
    separator_(coeffs_.lookupOrDefault<string>("separator", string(","))[0]),  // only the first char is used
    mergeSeparators_(readBool(coeffs_.lookup("mergeSeparators"))),
    fName_(coeffs_.lookup("fileName"))
{
    // Each component of Type must map to exactly one column.
    if (componentColumns_.size() != pTraits<Type>::nComponents)
    {
        FatalErrorIn("Foam::CSV<Type>::CSV(const word&, Istream&)")
            << componentColumns_ << " does not have the expected length of "
            << pTraits<Type>::nComponents << endl
            << exit(FatalError);
    }

    read();

    TableBase<Type>::check();
}
void Foam::multiSolver::synchronizeParallel() const
{
    // Barrier-style synchronisation: the master sends a 'true' token to
    // every slave; each slave blocks until its token arrives.
    if (Pstream::master())
    {
        // Give go signal
        for
        (
            int slave=Pstream::firstSlave();
            slave<=Pstream::lastSlave();
            slave++
        )
        {
            OPstream toSlave(Pstream::blocking, slave);
            toSlave << true;
        }
    }
    else
    {
        // Receive go signal (the boolean's value is discarded; only the
        // blocking read matters)
        {
            IPstream fromMaster(Pstream::blocking, Pstream::masterNo());
            readBool(fromMaster);
        }
    }
}
IsoMediaFile::Property WriterConfig::readProperty(const Json::Value& propertyValues) const
{
    // Parse the image-property arrays from JSON into an IsoMediaFile::Property.
    // Three property kinds are handled: irot (rotation), rloc (relative
    // location) and clap (clean aperture). For each entry the "essential"
    // flag defaults to true when absent.
    IsoMediaFile::Property newProperty;

    // Rotation properties
    for (const auto& irot : propertyValues["irot"])
    {
        IsoMediaFile::Irot newIrot;
        newIrot.essential = readBool(irot["essential"], true);
        newIrot.angle = readUint32(irot, "angle");
        newIrot.uniq_bsid = readOptionalUint(irot["uniq_bsid"]);
        newIrot.refs_list = parseRefsList(irot["refs_list"]);
        newIrot.idxs_list = parseIndexList(irot["idxs_list"]);
        newProperty.irots.push_back(newIrot);
    }

    // Relative-location properties
    for (const auto& rloc : propertyValues["rloc"])
    {
        IsoMediaFile::Rloc newRloc;
        newRloc.essential = readBool(rloc["essential"], true);
        newRloc.horizontal_offset = readUint32(rloc, "horizontal_offset");
        newRloc.vertical_offset = readUint32(rloc, "vertical_offset");
        newRloc.uniq_bsid = readOptionalUint(rloc["uniq_bsid"]);
        newRloc.refs_list = parseRefsList(rloc["refs_list"]);
        newRloc.idxs_list = parseIndexList(rloc["idxs_list"]);
        newProperty.rlocs.push_back(newRloc);
    }

    // Clean-aperture properties (each dimension/offset as a N/D fraction)
    for (const auto& clap : propertyValues["clap"])
    {
        IsoMediaFile::Clap newClap;
        newClap.essential = readBool(clap["essential"], true);
        newClap.clapWidthN = readUint32(clap, "clapWidthN");
        newClap.clapWidthD = readUint32(clap, "clapWidthD");
        newClap.clapHeightN = readUint32(clap, "clapHeightN");
        newClap.clapHeightD = readUint32( clap, "clapHeightD");
        newClap.horizOffN = readUint32(clap, "horizOffN");
        newClap.horizOffD = readUint32(clap, "horizOffD");
        newClap.vertOffN = readUint32(clap, "vertOffN");
        newClap.vertOffD = readUint32(clap, "vertOffD");
        newClap.uniq_bsid = readOptionalUint(clap["uniq_bsid"]);
        newClap.refs_list = parseRefsList(clap["refs_list"]);
        newClap.idxs_list = parseIndexList(clap["idxs_list"]);
        newProperty.claps.push_back(newClap);
    }

    return newProperty;
}
// Dictionary constructor: initialises the time-varying mapped fixed-value
// patch field, validating the mapping method and applying either an
// explicit "value" or a full evaluation.
Foam::timeVaryingMappedFixedValueFvPatchField<Type>::
timeVaryingMappedFixedValueFvPatchField
(
    const fvPatch& p,
    const DimensionedField<Type, volMesh>& iF,
    const dictionary& dict
)
:
    fixedValueFvPatchField<Type>(p, iF),
    fieldTableName_(iF.name()),  // default; may be overridden below
    setAverage_(readBool(dict.lookup("setAverage"))),
    perturb_(dict.lookupOrDefault("perturb", 1e-5)),  // jitter for degenerate layouts -- TODO confirm
    mapMethod_
    (
        dict.lookupOrDefault<word>
        (
            "mapMethod",
            "planarInterpolation"
        )
    ),
    mapperPtr_(NULL),
    sampleTimes_(0),
    startSampleTime_(-1),
    startSampledValues_(0),
    startAverage_(Zero),
    endSampleTime_(-1),
    endSampledValues_(0),
    endAverage_(Zero),
    offset_(Function1<Type>::New("offset", dict))
{
    // Only two mapping methods are supported.
    if
    (
        mapMethod_ != "planarInterpolation"
     && mapMethod_ != "nearest"
    )
    {
        FatalIOErrorInFunction
        (
            dict
        )   << "mapMethod should be one of 'planarInterpolation'"
            << ", 'nearest'" << exit(FatalIOError);
    }

    // Optional override of the table name (defaults to the field name).
    dict.readIfPresent("fieldTableName", fieldTableName_);

    if (dict.found("value"))
    {
        fvPatchField<Type>::operator==(Field<Type>("value", dict, p.size()));
    }
    else
    {
        // Note: we use evaluate() here to trigger updateCoeffs followed
        // by re-setting of fvatchfield::updated_ flag. This is
        // so if first use is in the next time step it retriggers
        // a new update.
        this->evaluate(Pstream::blocking);
    }
}
bool Protocol::accely(int idx, double val){ //Set the desired accelaration in x axis to the specified value in hovertank mode. char char_buff[20]; sprintf(char_buff, "accely %d %f", idx, val); sendLine(char_buff); readAck(); return readBool(); }
bool Protocol::speed(int idx, double val){ //Set the desired speed to the specified value. char char_buff[20]; sprintf(char_buff, "speed %d %f", idx, val); sendLine(char_buff); readAck(); return readBool(); }
//Tank commands bool Protocol::shoot(int idx){ //Perform a shoot request. char char_buff[20]; sprintf(char_buff, "shoot %d", idx); sendLine(char_buff); readAck(); return readBool(); }
void seccurved::loadSection(std::stringstream& file)
{
    // Deserialise a "curved" section from the stream. Reads are strictly
    // sequential and must match the writer's field order exactly.
    bSpeed = readBool(&file);
    int namelength = readInt(&file);    // name is length-prefixed
    sName = QString(readString(&file, namelength).c_str());
    fVel = readFloat(&file);
    fAngle = readFloat(&file);
    fRadius = readFloat(&file);
    fDirection = readFloat(&file);
    fLeadIn = readFloat(&file);
    fLeadOut = readFloat(&file);
    bOrientation = readBool(&file);
    // Embedded roll-function curve.
    rollFunc->loadFunction(file);
}
bool Protocol::angvel(int idx, double val){ //Set the desired angular velocity to the specified value. char char_buff[20]; sprintf(char_buff, "angvel %d %f", idx, val); sendLine(char_buff); readAck(); return readBool(); }
shared_ptr<IfcBoolean> IfcBoolean::createObjectFromSTEP( const std::wstring& arg, const std::map<int,shared_ptr<BuildingEntity> >& map )
{
    // STEP placeholders: "$" (unset) and "*" (derived) carry no value, so
    // both yield a null pointer.
    if( arg.compare( L"$" ) == 0 || arg.compare( L"*" ) == 0 )
    {
        return shared_ptr<IfcBoolean>();
    }

    // Otherwise decode the boolean literal into a fresh type object.
    shared_ptr<IfcBoolean> result( new IfcBoolean() );
    readBool( arg, result->m_value );
    return result;
}
bool executeIfParallelSerialFunctionObject::read(const dictionary& dict)
{
    // Refresh the parallel/serial switch from the dictionary, then let the
    // base class re-read the rest of its configuration.
    runIfParallel_ = readBool(dict.lookup("runIfParallel"));

    return conditionalFunctionObjectListProxy::read(dict);
}
bool correctThermoFunctionObject::start()
{
    // Read the density-update switch; the density field name is only
    // required when updating is enabled.
    updateRho_ = readBool(dict_.lookup("updateRho"));

    if (updateRho_)
    {
        rhoName_ = word(dict_.lookup("rhoName"));
    }

    return updateSimpleFunctionObject::start();
}
// Construct the single-step combustion model: allocates the fuel
// consumption-rate field, reads the implicit/explicit switch, and verifies
// the thermo package is a singleStepReactingMixture.
singleStepCombustion<CombThermoType, ThermoType>::singleStepCombustion
(
    const word& modelType,
    const fvMesh& mesh
)
:
    CombThermoType(modelType, mesh),
    singleMixturePtr_(NULL),  // set below once the thermo type is verified
    wFuel_
    (
        IOobject
        (
            "wFuel",
            this->mesh().time().timeName(),
            this->mesh(),
            IOobject::NO_READ,
            IOobject::NO_WRITE
        ),
        this->mesh(),
        dimensionedScalar("zero", dimMass/dimVolume/dimTime, 0.0)
    ),
    semiImplicit_(readBool(this->coeffs_.lookup("semiImplicit")))
{
    // The model requires a single-step reacting mixture; anything else is
    // a configuration error.
    if (isA<singleStepReactingMixture<ThermoType> >(this->thermo()))
    {
        singleMixturePtr_ =
            &dynamic_cast<singleStepReactingMixture<ThermoType>&>
            (
                this->thermo()
            );
    }
    else
    {
        FatalErrorIn
        (
            "singleStepCombustion<CombThermoType, ThermoType>::"
            "singleStepCombustion"
            "("
                "const word&, "
                "const fvMesh&"
            ")"
        )   << "Inconsistent thermo package for " << this->type()
            << " model:\n"
            << "    " << this->thermo().type() << nl << nl
            << "Please select a thermo package based on "
            << "singleStepReactingMixture" << exit(FatalError);
    }

    if (semiImplicit_)
    {
        Info<< "Combustion mode: semi-implicit" << endl;
    }
    else
    {
        Info<< "Combustion mode: explicit" << endl;
    }
}
// Construct from components virtualMassForce::virtualMassForce ( const dictionary& dict, cfdemCloud& sm ) : forceModel(dict,sm), propsDict_(dict.subDict(typeName + "Props")), velFieldName_(propsDict_.lookup("velFieldName")), U_(sm.mesh().lookupObject<volVectorField> (velFieldName_)), phiFieldName_(propsDict_.lookup("phiFieldName")), phi_(sm.mesh().lookupObject<surfaceScalarField> (phiFieldName_)), UrelOld_(NULL), splitUrelCalculation_(false), Cadd_(0.5) { if (particleCloud_.dataExchangeM().maxNumberOfParticles() > 0) { // get memory for 2d array particleCloud_.dataExchangeM().allocateArray(UrelOld_,NOTONCPU,3); } // init force sub model setForceSubModels(propsDict_); // define switches which can be read from dict forceSubM(0).setSwitchesList(0,true); // activate treatExplicit switch forceSubM(0).setSwitchesList(4,true); // activate search for interpolate switch forceSubM(0).readSwitches(); //Extra switches/settings if(propsDict_.found("splitUrelCalculation")) { splitUrelCalculation_ = readBool(propsDict_.lookup("splitUrelCalculation")); if(splitUrelCalculation_) Info << "Virtual mass model: will split the Urel calculation\n"; Info << "WARNING: be sure that LIGGGHTS integration takes ddtv_p implicitly into account! \n"; } if(propsDict_.found("Cadd")) { Cadd_ = readScalar(propsDict_.lookup("Cadd")); Info << "Virtual mass model: using non-standard Cadd = " << Cadd_ << endl; } particleCloud_.checkCG(true); //Append the field names to be probed particleCloud_.probeM().initialize(typeName, "virtualMass.logDat"); particleCloud_.probeM().vectorFields_.append("virtualMassForce"); //first entry must the be the force particleCloud_.probeM().vectorFields_.append("Urel"); particleCloud_.probeM().vectorFields_.append("UrelOld"); particleCloud_.probeM().vectorFields_.append("ddtUrel"); particleCloud_.probeM().scalarFields_.append("Vs"); particleCloud_.probeM().scalarFields_.append("rho"); particleCloud_.probeM().writeHeader(); }